gcc/emit-rtl.c
1 /* Emit RTL for the GCC expander.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
23 /* Middle-to-low level generation of rtx code and insns.
25 This file contains support functions for creating rtl expressions
26 and manipulating them in the doubly-linked chain of insns.
28 The patterns of the insns are created by machine-dependent
29 routines in insn-emit.c, which is generated automatically from
30 the machine description. These routines make the individual rtx's
31 of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
32 which are automatically generated from rtl.def; what is machine
33 dependent is the kind of rtx's they make and what arguments they
34 use. */
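/* Editorial addition -- an illustrative sketch, not part of the original file.
   Assuming a pseudo register 100 of SImode, a pattern fragment such as
   (plus:SI (reg:SI 100) (const_int 4)) would be built by a generated
   routine roughly as

     rtx sum = gen_rtx_PLUS (SImode, gen_rtx_REG (SImode, 100), GEN_INT (4));

   where gen_rtx_PLUS expands to gen_rtx_fmt_ee (PLUS, ...) from genrtl.h.  */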
36 #include "config.h"
37 #include "system.h"
38 #include "coretypes.h"
39 #include "tm.h"
40 #include "toplev.h"
41 #include "rtl.h"
42 #include "tree.h"
43 #include "tm_p.h"
44 #include "flags.h"
45 #include "function.h"
46 #include "expr.h"
47 #include "regs.h"
48 #include "hard-reg-set.h"
49 #include "hashtab.h"
50 #include "insn-config.h"
51 #include "recog.h"
52 #include "real.h"
53 #include "bitmap.h"
54 #include "basic-block.h"
55 #include "ggc.h"
56 #include "debug.h"
57 #include "langhooks.h"
59 /* Commonly used modes. */
61 enum machine_mode byte_mode; /* Mode whose width is BITS_PER_UNIT. */
62 enum machine_mode word_mode; /* Mode whose width is BITS_PER_WORD. */
63 enum machine_mode double_mode; /* Mode whose width is DOUBLE_TYPE_SIZE. */
64 enum machine_mode ptr_mode; /* Mode whose width is POINTER_SIZE. */
67 /* This is *not* reset after each function. It gives each CODE_LABEL
68 in the entire compilation a unique label number. */
70 static GTY(()) int label_num = 1;
72 /* Nonzero means do not generate NOTEs for source line numbers. */
74 static int no_line_numbers;
76 /* Commonly used rtx's, so that we only need space for one copy.
77 These are initialized once for the entire compilation.
78 All of these are unique; no other rtx-object will be equal to any
79 of these. */
81 rtx global_rtl[GR_MAX];
83 /* Commonly used RTL for hard registers. These objects are not necessarily
84 unique, so we allocate them separately from global_rtl. They are
85 initialized once per compilation unit, then copied into regno_reg_rtx
86 at the beginning of each function. */
87 static GTY(()) rtx static_regno_reg_rtx[FIRST_PSEUDO_REGISTER];
89 /* We record floating-point CONST_DOUBLEs in each floating-point mode for
90 the values of 0, 1, and 2. For the integer entries and VOIDmode, we
91 record a copy of const[012]_rtx. */
93 rtx const_tiny_rtx[3][(int) MAX_MACHINE_MODE];
95 rtx const_true_rtx;
97 REAL_VALUE_TYPE dconst0;
98 REAL_VALUE_TYPE dconst1;
99 REAL_VALUE_TYPE dconst2;
100 REAL_VALUE_TYPE dconst3;
101 REAL_VALUE_TYPE dconst10;
102 REAL_VALUE_TYPE dconstm1;
103 REAL_VALUE_TYPE dconstm2;
104 REAL_VALUE_TYPE dconsthalf;
105 REAL_VALUE_TYPE dconstthird;
106 REAL_VALUE_TYPE dconstpi;
107 REAL_VALUE_TYPE dconste;
109 /* All references to the following fixed hard registers go through
110 these unique rtl objects. On machines where the frame-pointer and
111 arg-pointer are the same register, they use the same unique object.
113 After register allocation, other rtl objects which used to be pseudo-regs
114 may be clobbered to refer to the frame-pointer register.
115 But references that were originally to the frame-pointer can be
116 distinguished from the others because they contain frame_pointer_rtx.
118 When to use frame_pointer_rtx and hard_frame_pointer_rtx is a little
119 tricky: until register elimination has taken place hard_frame_pointer_rtx
120 should be used if it is being set, and frame_pointer_rtx otherwise. After
121 register elimination hard_frame_pointer_rtx should always be used.
122 On machines where the two registers are the same (most machines), these
123 are the same rtx.
125 In an inline procedure, the stack and frame pointer rtxs may not be
126 used for anything else. */
127 rtx static_chain_rtx; /* (REG:Pmode STATIC_CHAIN_REGNUM) */
128 rtx static_chain_incoming_rtx; /* (REG:Pmode STATIC_CHAIN_INCOMING_REGNUM) */
129 rtx pic_offset_table_rtx; /* (REG:Pmode PIC_OFFSET_TABLE_REGNUM) */
131 /* This is used to implement __builtin_return_address for some machines.
132 See for instance the MIPS port. */
133 rtx return_address_pointer_rtx; /* (REG:Pmode RETURN_ADDRESS_POINTER_REGNUM) */
135 /* We make one copy of (const_int C) where C is in
136 [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
137 to save space during the compilation and simplify comparisons of
138 integers. */
140 rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
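/* Editorial note, not part of the original file: because of this sharing,
   small constants can be compared by pointer.  For example both of

     GEN_INT (0) == const0_rtx
     GEN_INT (1) == const1_rtx

   hold, since gen_rtx_CONST_INT below hands back the cached copy for any
   argument in [-MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT].  */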
142 /* A hash table storing CONST_INTs whose absolute value is greater
143 than MAX_SAVED_CONST_INT. */
145 static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
146 htab_t const_int_htab;
148 /* A hash table storing memory attribute structures. */
149 static GTY ((if_marked ("ggc_marked_p"), param_is (struct mem_attrs)))
150 htab_t mem_attrs_htab;
152 /* A hash table storing register attribute structures. */
153 static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs)))
154 htab_t reg_attrs_htab;
156 /* A hash table storing all CONST_DOUBLEs. */
157 static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
158 htab_t const_double_htab;
160 #define first_insn (cfun->emit->x_first_insn)
161 #define last_insn (cfun->emit->x_last_insn)
162 #define cur_insn_uid (cfun->emit->x_cur_insn_uid)
163 #define last_location (cfun->emit->x_last_location)
164 #define first_label_num (cfun->emit->x_first_label_num)
166 static rtx make_jump_insn_raw (rtx);
167 static rtx make_call_insn_raw (rtx);
168 static rtx find_line_note (rtx);
169 static rtx change_address_1 (rtx, enum machine_mode, rtx, int);
170 static void unshare_all_decls (tree);
171 static void reset_used_decls (tree);
172 static void mark_label_nuses (rtx);
173 static hashval_t const_int_htab_hash (const void *);
174 static int const_int_htab_eq (const void *, const void *);
175 static hashval_t const_double_htab_hash (const void *);
176 static int const_double_htab_eq (const void *, const void *);
177 static rtx lookup_const_double (rtx);
178 static hashval_t mem_attrs_htab_hash (const void *);
179 static int mem_attrs_htab_eq (const void *, const void *);
180 static mem_attrs *get_mem_attrs (HOST_WIDE_INT, tree, rtx, rtx, unsigned int,
181 enum machine_mode);
182 static hashval_t reg_attrs_htab_hash (const void *);
183 static int reg_attrs_htab_eq (const void *, const void *);
184 static reg_attrs *get_reg_attrs (tree, int);
185 static tree component_ref_for_mem_expr (tree);
186 static rtx gen_const_vector (enum machine_mode, int);
187 static rtx gen_complex_constant_part (enum machine_mode, rtx, int);
188 static void copy_rtx_if_shared_1 (rtx *orig);
190 /* Probability of the conditional branch currently being processed by try_split.
191 Set to -1 otherwise. */
192 int split_branch_probability = -1;
194 /* Returns a hash code for X (which is really a CONST_INT).  */
196 static hashval_t
197 const_int_htab_hash (const void *x)
199 return (hashval_t) INTVAL ((rtx) x);
202 /* Returns nonzero if the value represented by X (which is really a
203 CONST_INT) is the same as that given by Y (which is really a
204 HOST_WIDE_INT *). */
206 static int
207 const_int_htab_eq (const void *x, const void *y)
209 return (INTVAL ((rtx) x) == *((const HOST_WIDE_INT *) y));
212 /* Returns a hash code for X (which is really a CONST_DOUBLE). */
213 static hashval_t
214 const_double_htab_hash (const void *x)
216 rtx value = (rtx) x;
217 hashval_t h;
219 if (GET_MODE (value) == VOIDmode)
220 h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
221 else
223 h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
224 /* MODE is used in the comparison, so it should be in the hash. */
225 h ^= GET_MODE (value);
227 return h;
230 /* Returns nonzero if the value represented by X (really a ...)
231 is the same as that represented by Y (really a ...) */
232 static int
233 const_double_htab_eq (const void *x, const void *y)
235 rtx a = (rtx)x, b = (rtx)y;
237 if (GET_MODE (a) != GET_MODE (b))
238 return 0;
239 if (GET_MODE (a) == VOIDmode)
240 return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
241 && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
242 else
243 return real_identical (CONST_DOUBLE_REAL_VALUE (a),
244 CONST_DOUBLE_REAL_VALUE (b));
247 /* Returns a hash code for X (which is really a mem_attrs *).  */
249 static hashval_t
250 mem_attrs_htab_hash (const void *x)
252 mem_attrs *p = (mem_attrs *) x;
254 return (p->alias ^ (p->align * 1000)
255 ^ ((p->offset ? INTVAL (p->offset) : 0) * 50000)
256 ^ ((p->size ? INTVAL (p->size) : 0) * 2500000)
257 ^ (size_t) p->expr);
260 /* Returns nonzero if the value represented by X (which is really a
261 mem_attrs *) is the same as that given by Y (which is also really a
262 mem_attrs *). */
264 static int
265 mem_attrs_htab_eq (const void *x, const void *y)
267 mem_attrs *p = (mem_attrs *) x;
268 mem_attrs *q = (mem_attrs *) y;
270 return (p->alias == q->alias && p->expr == q->expr && p->offset == q->offset
271 && p->size == q->size && p->align == q->align);
274 /* Allocate a new mem_attrs structure and insert it into the hash table if
275 one identical to it is not already in the table. We are doing this for
276 MEM of mode MODE. */
278 static mem_attrs *
279 get_mem_attrs (HOST_WIDE_INT alias, tree expr, rtx offset, rtx size,
280 unsigned int align, enum machine_mode mode)
282 mem_attrs attrs;
283 void **slot;
285 /* If everything is the default, we can just return zero.
286 This must match what the corresponding MEM_* macros return when the
287 field is not present. */
288 if (alias == 0 && expr == 0 && offset == 0
289 && (size == 0
290 || (mode != BLKmode && GET_MODE_SIZE (mode) == INTVAL (size)))
291 && (STRICT_ALIGNMENT && mode != BLKmode
292 ? align == GET_MODE_ALIGNMENT (mode) : align == BITS_PER_UNIT))
293 return 0;
295 attrs.alias = alias;
296 attrs.expr = expr;
297 attrs.offset = offset;
298 attrs.size = size;
299 attrs.align = align;
301 slot = htab_find_slot (mem_attrs_htab, &attrs, INSERT);
302 if (*slot == 0)
304 *slot = ggc_alloc (sizeof (mem_attrs));
305 memcpy (*slot, &attrs, sizeof (mem_attrs));
308 return *slot;
311 /* Returns a hash code for X (which is really a reg_attrs *).  */
313 static hashval_t
314 reg_attrs_htab_hash (const void *x)
316 reg_attrs *p = (reg_attrs *) x;
318 return ((p->offset * 1000) ^ (long) p->decl);
321 /* Returns nonzero if the value represented by X (which is really a
322 reg_attrs *) is the same as that given by Y (which is also really a
323 reg_attrs *). */
325 static int
326 reg_attrs_htab_eq (const void *x, const void *y)
328 reg_attrs *p = (reg_attrs *) x;
329 reg_attrs *q = (reg_attrs *) y;
331 return (p->decl == q->decl && p->offset == q->offset);
333 /* Allocate a new reg_attrs structure and insert it into the hash table if
334 one identical to it is not already in the table.  We are doing this for a
335 REG whose expression is DECL and whose offset is OFFSET.  */
337 static reg_attrs *
338 get_reg_attrs (tree decl, int offset)
340 reg_attrs attrs;
341 void **slot;
343 /* If everything is the default, we can just return zero. */
344 if (decl == 0 && offset == 0)
345 return 0;
347 attrs.decl = decl;
348 attrs.offset = offset;
350 slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
351 if (*slot == 0)
353 *slot = ggc_alloc (sizeof (reg_attrs));
354 memcpy (*slot, &attrs, sizeof (reg_attrs));
357 return *slot;
360 /* Generate a new REG rtx. Make sure ORIGINAL_REGNO is set properly, and
361 don't attempt to share with the various global pieces of rtl (such as
362 frame_pointer_rtx). */
365 gen_raw_REG (enum machine_mode mode, int regno)
367 rtx x = gen_rtx_raw_REG (mode, regno);
368 ORIGINAL_REGNO (x) = regno;
369 return x;
372 /* There are some RTL codes that require special attention; the generation
373 functions do the raw handling. If you add to this list, modify
374 special_rtx in gengenrtl.c as well. */
377 gen_rtx_CONST_INT (enum machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
379 void **slot;
381 if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
382 return const_int_rtx[arg + MAX_SAVED_CONST_INT];
384 #if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
385 if (const_true_rtx && arg == STORE_FLAG_VALUE)
386 return const_true_rtx;
387 #endif
389 /* Look up the CONST_INT in the hash table. */
390 slot = htab_find_slot_with_hash (const_int_htab, &arg,
391 (hashval_t) arg, INSERT);
392 if (*slot == 0)
393 *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);
395 return (rtx) *slot;
399 gen_int_mode (HOST_WIDE_INT c, enum machine_mode mode)
401 return GEN_INT (trunc_int_for_mode (c, mode));
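/* Editorial note, not part of the original file: gen_int_mode canonicalizes
   the constant for MODE.  Assuming QImode is 8 bits wide,

     gen_int_mode (0xff, QImode) == constm1_rtx

   because trunc_int_for_mode sign-extends the value from the width of MODE
   before the shared CONST_INT is looked up.  */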
404 /* CONST_DOUBLEs might be created from pairs of integers, or from
405 REAL_VALUE_TYPEs. Also, their length is known only at run time,
406 so we cannot use gen_rtx_raw_CONST_DOUBLE. */
408 /* Determine whether REAL, a CONST_DOUBLE, already exists in the
409 hash table. If so, return its counterpart; otherwise add it
410 to the hash table and return it. */
411 static rtx
412 lookup_const_double (rtx real)
414 void **slot = htab_find_slot (const_double_htab, real, INSERT);
415 if (*slot == 0)
416 *slot = real;
418 return (rtx) *slot;
421 /* Return a CONST_DOUBLE rtx for a floating-point value specified by
422 VALUE in mode MODE. */
424 const_double_from_real_value (REAL_VALUE_TYPE value, enum machine_mode mode)
426 rtx real = rtx_alloc (CONST_DOUBLE);
427 PUT_MODE (real, mode);
429 memcpy (&CONST_DOUBLE_LOW (real), &value, sizeof (REAL_VALUE_TYPE));
431 return lookup_const_double (real);
434 /* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
435 of ints: I0 is the low-order word and I1 is the high-order word.
436 Do not use this routine for non-integer modes; convert to
437 REAL_VALUE_TYPE and use CONST_DOUBLE_FROM_REAL_VALUE. */
440 immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, enum machine_mode mode)
442 rtx value;
443 unsigned int i;
445 if (mode != VOIDmode)
447 int width;
449 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
450 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
451 /* We can get a 0 for an error mark. */
452 || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
453 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT);
455 /* We clear out all bits that don't belong in MODE, unless they and
456 our sign bit are all one. So we get either a reasonable negative
457 value or a reasonable unsigned value for this mode. */
458 width = GET_MODE_BITSIZE (mode);
459 if (width < HOST_BITS_PER_WIDE_INT
460 && ((i0 & ((HOST_WIDE_INT) (-1) << (width - 1)))
461 != ((HOST_WIDE_INT) (-1) << (width - 1))))
462 i0 &= ((HOST_WIDE_INT) 1 << width) - 1, i1 = 0;
463 else if (width == HOST_BITS_PER_WIDE_INT
464 && ! (i1 == ~0 && i0 < 0))
465 i1 = 0;
466 else
467 /* We should be able to represent this value as a constant. */
468 gcc_assert (width <= 2 * HOST_BITS_PER_WIDE_INT);
470 /* If this would be an entire word for the target, but is not for
471 the host, then sign-extend on the host so that the number will
472 look the same way on the host that it would on the target.
474 For example, when building a 64 bit alpha hosted 32 bit sparc
475 targeted compiler, then we want the 32 bit unsigned value -1 to be
476 represented as a 64 bit value -1, and not as 0x00000000ffffffff.
477 The latter confuses the sparc backend. */
479 if (width < HOST_BITS_PER_WIDE_INT
480 && (i0 & ((HOST_WIDE_INT) 1 << (width - 1))))
481 i0 |= ((HOST_WIDE_INT) (-1) << width);
483 /* If MODE fits within HOST_BITS_PER_WIDE_INT, always use a
484 CONST_INT.
486 ??? Strictly speaking, this is wrong if we create a CONST_INT for
487 a large unsigned constant with the size of MODE being
488 HOST_BITS_PER_WIDE_INT and later try to interpret that constant
489 in a wider mode. In that case we will mis-interpret it as a
490 negative number.
492 Unfortunately, the only alternative is to make a CONST_DOUBLE for
493 any constant in any mode if it is an unsigned constant larger
494 than the maximum signed integer in an int on the host. However,
495 doing this will break everyone that always expects to see a
496 CONST_INT for SImode and smaller.
498 We have always been making CONST_INTs in this case, so nothing
499 new is being broken. */
501 if (width <= HOST_BITS_PER_WIDE_INT)
502 i1 = (i0 < 0) ? ~(HOST_WIDE_INT) 0 : 0;
505 /* If this integer fits in one word, return a CONST_INT. */
506 if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
507 return GEN_INT (i0);
509 /* We use VOIDmode for integers. */
510 value = rtx_alloc (CONST_DOUBLE);
511 PUT_MODE (value, VOIDmode);
513 CONST_DOUBLE_LOW (value) = i0;
514 CONST_DOUBLE_HIGH (value) = i1;
516 for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
517 XWINT (value, i) = 0;
519 return lookup_const_double (value);
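/* Editorial sketch, not part of the original file.  Assuming a host with a
   32-bit HOST_WIDE_INT:

     immed_double_const (5, 0, DImode) == GEN_INT (5)

   because the value fits in one word, while

     immed_double_const (0x23456789, 0x1, DImode)

   yields a VOIDmode CONST_DOUBLE with CONST_DOUBLE_LOW == 0x23456789 and
   CONST_DOUBLE_HIGH == 0x1.  */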
523 gen_rtx_REG (enum machine_mode mode, unsigned int regno)
525 /* In case the MD file explicitly references the frame pointer, have
526 all such references point to the same frame pointer. This is
527 used during frame pointer elimination to distinguish the explicit
528 references to these registers from pseudos that happened to be
529 assigned to them.
531 If we have eliminated the frame pointer or arg pointer, we will
532 be using it as a normal register, for example as a spill
533 register. In such cases, we might be accessing it in a mode that
534 is not Pmode and therefore cannot use the pre-allocated rtx.
536 Also don't do this when we are making new REGs in reload, since
537 we don't want to get confused with the real pointers. */
539 if (mode == Pmode && !reload_in_progress)
541 if (regno == FRAME_POINTER_REGNUM
542 && (!reload_completed || frame_pointer_needed))
543 return frame_pointer_rtx;
544 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
545 if (regno == HARD_FRAME_POINTER_REGNUM
546 && (!reload_completed || frame_pointer_needed))
547 return hard_frame_pointer_rtx;
548 #endif
549 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && HARD_FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
550 if (regno == ARG_POINTER_REGNUM)
551 return arg_pointer_rtx;
552 #endif
553 #ifdef RETURN_ADDRESS_POINTER_REGNUM
554 if (regno == RETURN_ADDRESS_POINTER_REGNUM)
555 return return_address_pointer_rtx;
556 #endif
557 if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
558 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
559 return pic_offset_table_rtx;
560 if (regno == STACK_POINTER_REGNUM)
561 return stack_pointer_rtx;
564 #if 0
565 /* If the per-function register table has been set up, try to re-use
566 an existing entry in that table to avoid useless generation of RTL.
568 This code is disabled for now until we can fix the various backends
569 which depend on having non-shared hard registers in some cases. Long
570 term we want to re-enable this code as it can significantly cut down
571 on the amount of useless RTL that gets generated.
573 We'll also need to fix some code that runs after reload that wants to
574 set ORIGINAL_REGNO. */
576 if (cfun
577 && cfun->emit
578 && regno_reg_rtx
579 && regno < FIRST_PSEUDO_REGISTER
580 && reg_raw_mode[regno] == mode)
581 return regno_reg_rtx[regno];
582 #endif
584 return gen_raw_REG (mode, regno);
588 gen_rtx_MEM (enum machine_mode mode, rtx addr)
590 rtx rt = gen_rtx_raw_MEM (mode, addr);
592 /* This field is not cleared by the mere allocation of the rtx, so
593 we clear it here. */
594 MEM_ATTRS (rt) = 0;
596 return rt;
599 /* Generate a memory referring to non-trapping constant memory. */
602 gen_const_mem (enum machine_mode mode, rtx addr)
604 rtx mem = gen_rtx_MEM (mode, addr);
605 MEM_READONLY_P (mem) = 1;
606 MEM_NOTRAP_P (mem) = 1;
607 return mem;
610 /* We want to create (subreg:OMODE (obj:IMODE) OFFSET). Return true if
611 this construct would be valid, and false otherwise. */
613 bool
614 validate_subreg (enum machine_mode omode, enum machine_mode imode,
615 rtx reg, unsigned int offset)
617 unsigned int isize = GET_MODE_SIZE (imode);
618 unsigned int osize = GET_MODE_SIZE (omode);
620 /* All subregs must be aligned. */
621 if (offset % osize != 0)
622 return false;
624 /* The subreg offset cannot be outside the inner object. */
625 if (offset >= isize)
626 return false;
628 /* ??? This should not be here. Temporarily continue to allow word_mode
629 subregs of anything. The most common offender is (subreg:SI (reg:DF)).
630 Generally, backends are doing something sketchy but it'll take time to
631 fix them all. */
632 if (omode == word_mode)
634 /* ??? Similarly, e.g. with (subreg:DF (reg:TI)). Though store_bit_field
635 is the culprit here, and not the backends. */
636 else if (osize >= UNITS_PER_WORD && isize >= osize)
638 /* Allow component subregs of complex and vector. Though given the below
639 extraction rules, it's not always clear what that means. */
640 else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
641 && GET_MODE_INNER (imode) == omode)
643 /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
644 i.e. (subreg:V4SF (reg:SF) 0). This surely isn't the cleanest way to
645 represent this. It's questionable if this ought to be represented at
646 all -- why can't this all be hidden in post-reload splitters that make
647 arbitrarily mode changes to the registers themselves. */
648 else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
650 /* Subregs involving floating point modes are not allowed to
651 change size. Therefore (subreg:DI (reg:DF) 0) is fine, but
652 (subreg:SI (reg:DF) 0) isn't. */
653 else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
655 if (isize != osize)
656 return false;
659 /* Paradoxical subregs must have offset zero. */
660 if (osize > isize)
661 return offset == 0;
663 /* This is a normal subreg. Verify that the offset is representable. */
665 /* For hard registers, we already have most of these rules collected in
666 subreg_offset_representable_p. */
667 if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
669 unsigned int regno = REGNO (reg);
671 #ifdef CANNOT_CHANGE_MODE_CLASS
672 if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
673 && GET_MODE_INNER (imode) == omode)
675 else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
676 return false;
677 #endif
679 return subreg_offset_representable_p (regno, imode, offset, omode);
682 /* For pseudo registers, we want most of the same checks. Namely:
683 If the register is no larger than a word, the subreg must be lowpart.
684 If the register is larger than a word, the subreg must be the lowpart
685 of a subword. A subreg does *not* perform arbitrary bit extraction.
686 Given that we've already checked mode/offset alignment, we only have
687 to check subword subregs here. */
688 if (osize < UNITS_PER_WORD)
690 enum machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
691 unsigned int low_off = subreg_lowpart_offset (omode, wmode);
692 if (offset % UNITS_PER_WORD != low_off)
693 return false;
695 return true;
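/* Editorial examples, not part of the original file.  Assuming a target
   where word_mode is SImode and UNITS_PER_WORD is 4, the rules above give:

     (subreg:SI (reg:DI) 0)   valid   (low word)
     (subreg:SI (reg:DI) 4)   valid   (high word)
     (subreg:DI (reg:SI) 0)   valid   (paradoxical, offset zero)
     (subreg:DI (reg:SI) 4)   invalid (offset not aligned to the outer size,
                                       and a paradoxical subreg must use 0)  */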
699 gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset)
701 gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
702 return gen_rtx_raw_SUBREG (mode, reg, offset);
705 /* Generate a SUBREG representing the least-significant part of REG if MODE
706 is smaller than mode of REG, otherwise paradoxical SUBREG. */
709 gen_lowpart_SUBREG (enum machine_mode mode, rtx reg)
711 enum machine_mode inmode;
713 inmode = GET_MODE (reg);
714 if (inmode == VOIDmode)
715 inmode = mode;
716 return gen_rtx_SUBREG (mode, reg,
717 subreg_lowpart_offset (mode, inmode));
720 /* gen_rtvec (n, [rt1, ..., rtn])
722 ** This routine creates an rtvec and stores within it the
723 ** pointers to rtx's which are its arguments.
726 /*VARARGS1*/
727 rtvec
728 gen_rtvec (int n, ...)
730 int i, save_n;
731 rtx *vector;
732 va_list p;
734 va_start (p, n);
736 if (n == 0)
737 return NULL_RTVEC; /* Don't allocate an empty rtvec... */
739 vector = alloca (n * sizeof (rtx));
741 for (i = 0; i < n; i++)
742 vector[i] = va_arg (p, rtx);
744 /* The definition of VA_* in K&R C causes `n' to go out of scope. */
745 save_n = n;
746 va_end (p);
748 return gen_rtvec_v (save_n, vector);
751 rtvec
752 gen_rtvec_v (int n, rtx *argp)
754 int i;
755 rtvec rt_val;
757 if (n == 0)
758 return NULL_RTVEC; /* Don't allocate an empty rtvec... */
760 rt_val = rtvec_alloc (n); /* Allocate an rtvec... */
762 for (i = 0; i < n; i++)
763 rt_val->elem[i] = *argp++;
765 return rt_val;
768 /* Generate a REG rtx for a new pseudo register of mode MODE.
769 This pseudo is assigned the next sequential register number. */
772 gen_reg_rtx (enum machine_mode mode)
774 struct function *f = cfun;
775 rtx val;
777 /* Don't let anything called after initial flow analysis create new
778 registers. */
779 gcc_assert (!no_new_pseudos);
781 if (generating_concat_p
782 && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
783 || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
785 /* For complex modes, don't make a single pseudo.
786 Instead, make a CONCAT of two pseudos.
787 This allows noncontiguous allocation of the real and imaginary parts,
788 which makes much better code. Besides, allocating DCmode
789 pseudos overstrains reload on some machines like the 386. */
790 rtx realpart, imagpart;
791 enum machine_mode partmode = GET_MODE_INNER (mode);
793 realpart = gen_reg_rtx (partmode);
794 imagpart = gen_reg_rtx (partmode);
795 return gen_rtx_CONCAT (mode, realpart, imagpart);
798 /* Make sure regno_pointer_align and regno_reg_rtx are large
799 enough to have an element for this pseudo reg number. */
801 if (reg_rtx_no == f->emit->regno_pointer_align_length)
803 int old_size = f->emit->regno_pointer_align_length;
804 char *new;
805 rtx *new1;
807 new = ggc_realloc (f->emit->regno_pointer_align, old_size * 2);
808 memset (new + old_size, 0, old_size);
809 f->emit->regno_pointer_align = (unsigned char *) new;
811 new1 = ggc_realloc (f->emit->x_regno_reg_rtx,
812 old_size * 2 * sizeof (rtx));
813 memset (new1 + old_size, 0, old_size * sizeof (rtx));
814 regno_reg_rtx = new1;
816 f->emit->regno_pointer_align_length = old_size * 2;
819 val = gen_raw_REG (mode, reg_rtx_no);
820 regno_reg_rtx[reg_rtx_no++] = val;
821 return val;
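/* Editorial note, not part of the original file: when generating_concat_p
   is set, a complex pseudo is returned as a CONCAT of two scalar pseudos,
   e.g. gen_reg_rtx (DCmode) produces something like

     (concat:DC (reg:DF 100) (reg:DF 101))

   with two consecutive register numbers (100/101 are only illustrative).  */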
824 /* Generate a register with same attributes as REG, but offsetted by OFFSET.
825 Do the big endian correction if needed. */
828 gen_rtx_REG_offset (rtx reg, enum machine_mode mode, unsigned int regno, int offset)
830 rtx new = gen_rtx_REG (mode, regno);
831 tree decl;
832 HOST_WIDE_INT var_size;
834 /* PR middle-end/14084
835 The problem appears when a variable is stored in a larger register
836 and later it is used in the original mode or some mode in between
837 or some part of variable is accessed.
839 On little endian machines there is no problem because
840 the REG_OFFSET of the start of the variable is the same when
841 accessed in any mode (it is 0).
843 However, this is not true on big endian machines.
844 The offset of the start of the variable is different when accessed
845 in different modes.
846 When we are taking a part of the REG we have to change the OFFSET
847 from offset WRT size of mode of REG to offset WRT size of variable.
849 If we would not do the big endian correction the resulting REG_OFFSET
850 would be larger than the size of the DECL.
852 Examples of correction, for BYTES_BIG_ENDIAN WORDS_BIG_ENDIAN machine:
854 REG.mode MODE DECL size old offset new offset description
855 DI SI 4 4 0 int32 in SImode
856 DI SI 1 4 0 char in SImode
857 DI QI 1 7 0 char in QImode
858 DI QI 4 5 1 1st element in QImode
859 of char[4]
860 DI HI 4 6 2 1st element in HImode
861 of int16[2]
863 If the size of DECL is equal or greater than the size of REG
864 we can't do this correction because the register holds the
865 whole variable or a part of the variable and thus the REG_OFFSET
866 is already correct. */
868 decl = REG_EXPR (reg);
869 if ((BYTES_BIG_ENDIAN || WORDS_BIG_ENDIAN)
870 && decl != NULL
871 && offset > 0
872 && GET_MODE_SIZE (GET_MODE (reg)) > GET_MODE_SIZE (mode)
873 && ((var_size = int_size_in_bytes (TREE_TYPE (decl))) > 0
874 && var_size < GET_MODE_SIZE (GET_MODE (reg))))
876 int offset_le;
878 /* Convert machine endian to little endian WRT size of mode of REG. */
879 if (WORDS_BIG_ENDIAN)
880 offset_le = ((GET_MODE_SIZE (GET_MODE (reg)) - 1 - offset)
881 / UNITS_PER_WORD) * UNITS_PER_WORD;
882 else
883 offset_le = (offset / UNITS_PER_WORD) * UNITS_PER_WORD;
885 if (BYTES_BIG_ENDIAN)
886 offset_le += ((GET_MODE_SIZE (GET_MODE (reg)) - 1 - offset)
887 % UNITS_PER_WORD);
888 else
889 offset_le += offset % UNITS_PER_WORD;
891 if (offset_le >= var_size)
893 /* MODE is wider than the variable, so the new reg will cover
894 the whole variable and the resulting OFFSET should be 0.  */
895 offset = 0;
897 else
899 /* Convert little endian to machine endian WRT size of variable. */
900 if (WORDS_BIG_ENDIAN)
901 offset = ((var_size - 1 - offset_le)
902 / UNITS_PER_WORD) * UNITS_PER_WORD;
903 else
904 offset = (offset_le / UNITS_PER_WORD) * UNITS_PER_WORD;
906 if (BYTES_BIG_ENDIAN)
907 offset += ((var_size - 1 - offset_le)
908 % UNITS_PER_WORD);
909 else
910 offset += offset_le % UNITS_PER_WORD;
914 REG_ATTRS (new) = get_reg_attrs (REG_EXPR (reg),
915 REG_OFFSET (reg) + offset);
916 return new;
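/* Editorial trace, not part of the original file, of the "DI QI 4 5 1"
   row of the table above on a BYTES_BIG_ENDIAN && WORDS_BIG_ENDIAN target
   with UNITS_PER_WORD == 8:

     offset_le = ((8-1-5)/8)*8 + (8-1-5)%8 = 0 + 2 = 2   (less than var_size 4)
     offset    = ((4-1-2)/8)*8 + (4-1-2)%8 = 0 + 1 = 1

   so the QImode piece is recorded with REG_OFFSET 1 rather than 5.  */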
919 /* Set the register attributes of REG from the memory reference MEM.  */
921 void
922 set_reg_attrs_from_mem (rtx reg, rtx mem)
924 if (MEM_OFFSET (mem) && GET_CODE (MEM_OFFSET (mem)) == CONST_INT)
925 REG_ATTRS (reg)
926 = get_reg_attrs (MEM_EXPR (mem), INTVAL (MEM_OFFSET (mem)));
929 /* Set the register attributes for registers contained in PARM_RTX.
930 Use needed values from memory attributes of MEM. */
932 void
933 set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
935 if (REG_P (parm_rtx))
936 set_reg_attrs_from_mem (parm_rtx, mem);
937 else if (GET_CODE (parm_rtx) == PARALLEL)
939 /* Check for a NULL entry in the first slot, used to indicate that the
940 parameter goes both on the stack and in registers. */
941 int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
942 for (; i < XVECLEN (parm_rtx, 0); i++)
944 rtx x = XVECEXP (parm_rtx, 0, i);
945 if (REG_P (XEXP (x, 0)))
946 REG_ATTRS (XEXP (x, 0))
947 = get_reg_attrs (MEM_EXPR (mem),
948 INTVAL (XEXP (x, 1)));
953 /* Assign the RTX X to declaration T. */
954 void
955 set_decl_rtl (tree t, rtx x)
957 DECL_CHECK (t)->decl.rtl = x;
959 if (!x)
960 return;
961 /* For register, we maintain the reverse information too. */
962 if (REG_P (x))
963 REG_ATTRS (x) = get_reg_attrs (t, 0);
964 else if (GET_CODE (x) == SUBREG)
965 REG_ATTRS (SUBREG_REG (x))
966 = get_reg_attrs (t, -SUBREG_BYTE (x));
967 if (GET_CODE (x) == CONCAT)
969 if (REG_P (XEXP (x, 0)))
970 REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
971 if (REG_P (XEXP (x, 1)))
972 REG_ATTRS (XEXP (x, 1))
973 = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
975 if (GET_CODE (x) == PARALLEL)
977 int i;
978 for (i = 0; i < XVECLEN (x, 0); i++)
980 rtx y = XVECEXP (x, 0, i);
981 if (REG_P (XEXP (y, 0)))
982 REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
987 /* Assign the RTX X to parameter declaration T. */
988 void
989 set_decl_incoming_rtl (tree t, rtx x)
991 DECL_INCOMING_RTL (t) = x;
993 if (!x)
994 return;
995 /* For register, we maintain the reverse information too. */
996 if (REG_P (x))
997 REG_ATTRS (x) = get_reg_attrs (t, 0);
998 else if (GET_CODE (x) == SUBREG)
999 REG_ATTRS (SUBREG_REG (x))
1000 = get_reg_attrs (t, -SUBREG_BYTE (x));
1001 if (GET_CODE (x) == CONCAT)
1003 if (REG_P (XEXP (x, 0)))
1004 REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
1005 if (REG_P (XEXP (x, 1)))
1006 REG_ATTRS (XEXP (x, 1))
1007 = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
1009 if (GET_CODE (x) == PARALLEL)
1011 int i, start;
1013 /* Check for a NULL entry, used to indicate that the parameter goes
1014 both on the stack and in registers. */
1015 if (XEXP (XVECEXP (x, 0, 0), 0))
1016 start = 0;
1017 else
1018 start = 1;
1020 for (i = start; i < XVECLEN (x, 0); i++)
1022 rtx y = XVECEXP (x, 0, i);
1023 if (REG_P (XEXP (y, 0)))
1024 REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
1029 /* Identify REG (which may be a CONCAT) as a user register. */
1031 void
1032 mark_user_reg (rtx reg)
1034 if (GET_CODE (reg) == CONCAT)
1036 REG_USERVAR_P (XEXP (reg, 0)) = 1;
1037 REG_USERVAR_P (XEXP (reg, 1)) = 1;
1039 else
1041 gcc_assert (REG_P (reg));
1042 REG_USERVAR_P (reg) = 1;
1046 /* Identify REG as a probable pointer register and show its alignment
1047 as ALIGN, if nonzero. */
1049 void
1050 mark_reg_pointer (rtx reg, int align)
1052 if (! REG_POINTER (reg))
1054 REG_POINTER (reg) = 1;
1056 if (align)
1057 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1059 else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
1060 /* We can no longer be sure just how aligned this pointer is.  */
1061 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1064 /* Return 1 plus largest pseudo reg number used in the current function. */
1067 max_reg_num (void)
1069 return reg_rtx_no;
1072 /* Return 1 + the largest label number used so far in the current function. */
1075 max_label_num (void)
1077 return label_num;
1080 /* Return first label number used in this function (if any were used). */
1083 get_first_label_num (void)
1085 return first_label_num;
1088 /* If the rtx for label was created during the expansion of a nested
1089 function, then first_label_num won't include this label number.
1090 Fix this now so that array indices work later.  */
1092 void
1093 maybe_set_first_label_num (rtx x)
1095 if (CODE_LABEL_NUMBER (x) < first_label_num)
1096 first_label_num = CODE_LABEL_NUMBER (x);
1099 /* Return a value representing some low-order bits of X, where the number
1100 of low-order bits is given by MODE. Note that no conversion is done
1101 between floating-point and fixed-point values, rather, the bit
1102 representation is returned.
1104 This function handles the cases in common between gen_lowpart, below,
1105 and two variants in cse.c and combine.c. These are the cases that can
1106 be safely handled at all points in the compilation.
1108 If this is not a case we can handle, return 0. */
1111 gen_lowpart_common (enum machine_mode mode, rtx x)
1113 int msize = GET_MODE_SIZE (mode);
1114 int xsize;
1115 int offset = 0;
1116 enum machine_mode innermode;
1118 /* Unfortunately, this routine doesn't take a parameter for the mode of X,
1119 so we have to make one up. Yuk. */
1120 innermode = GET_MODE (x);
1121 if (GET_CODE (x) == CONST_INT && msize <= HOST_BITS_PER_WIDE_INT)
1122 innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
1123 else if (innermode == VOIDmode)
1124 innermode = mode_for_size (HOST_BITS_PER_WIDE_INT * 2, MODE_INT, 0);
1126 xsize = GET_MODE_SIZE (innermode);
1128 gcc_assert (innermode != VOIDmode && innermode != BLKmode);
1130 if (innermode == mode)
1131 return x;
1133 /* MODE must occupy no more words than the mode of X. */
1134 if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
1135 > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
1136 return 0;
1138 /* Don't allow generating paradoxical FLOAT_MODE subregs. */
1139 if (GET_MODE_CLASS (mode) == MODE_FLOAT && msize > xsize)
1140 return 0;
1142 offset = subreg_lowpart_offset (mode, innermode);
1144 if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
1145 && (GET_MODE_CLASS (mode) == MODE_INT
1146 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
1148 /* If we are getting the low-order part of something that has been
1149 sign- or zero-extended, we can either just use the object being
1150 extended or make a narrower extension. If we want an even smaller
1151 piece than the size of the object being extended, call ourselves
1152 recursively.
1154 This case is used mostly by combine and cse. */
1156 if (GET_MODE (XEXP (x, 0)) == mode)
1157 return XEXP (x, 0);
1158 else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
1159 return gen_lowpart_common (mode, XEXP (x, 0));
1160 else if (msize < xsize)
1161 return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
1163 else if (GET_CODE (x) == SUBREG || REG_P (x)
1164 || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
1165 || GET_CODE (x) == CONST_DOUBLE || GET_CODE (x) == CONST_INT)
1166 return simplify_gen_subreg (mode, x, innermode, offset);
1168 /* Otherwise, we can't do this. */
1169 return 0;
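/* Editorial examples, not part of the original file.  Assuming QImode is a
   single byte,

     gen_lowpart_common (QImode, GEN_INT (0x1234))

   goes through the CONST_INT/simplify_gen_subreg path and is expected to
   return GEN_INT (0x34), the low-order byte, while

     gen_lowpart_common (SImode, (zero_extend:DI (reg:SI R)))

   simply returns (reg:SI R) via the ZERO_EXTEND/SIGN_EXTEND case above.  */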
1172 /* Return the constant real or imaginary part (which has mode MODE)
1173 of a complex value X. The IMAGPART_P argument determines whether
1174 the real or imaginary component should be returned.  This function
1175 returns NULL_RTX if the component isn't a constant. */
1177 static rtx
1178 gen_complex_constant_part (enum machine_mode mode, rtx x, int imagpart_p)
1180 tree decl, part;
1182 if (MEM_P (x)
1183 && GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
1185 decl = SYMBOL_REF_DECL (XEXP (x, 0));
1186 if (decl != NULL_TREE && TREE_CODE (decl) == COMPLEX_CST)
1188 part = imagpart_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
1189 if (TREE_CODE (part) == REAL_CST
1190 || TREE_CODE (part) == INTEGER_CST)
1191 return expand_expr (part, NULL_RTX, mode, 0);
1194 return NULL_RTX;
1197 /* Return the real part (which has mode MODE) of a complex value X.
1198 This always comes at the low address in memory. */
1201 gen_realpart (enum machine_mode mode, rtx x)
1203 rtx part;
1205 /* Handle complex constants. */
1206 part = gen_complex_constant_part (mode, x, 0);
1207 if (part != NULL_RTX)
1208 return part;
1210 if (WORDS_BIG_ENDIAN
1211 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
1212 && REG_P (x)
1213 && REGNO (x) < FIRST_PSEUDO_REGISTER)
1214 internal_error
1215 ("can't access real part of complex value in hard register");
1216 else if (WORDS_BIG_ENDIAN)
1217 return gen_highpart (mode, x);
1218 else
1219 return gen_lowpart (mode, x);
1222 /* Return the imaginary part (which has mode MODE) of a complex value X.
1223 This always comes at the high address in memory. */
1226 gen_imagpart (enum machine_mode mode, rtx x)
1228 rtx part;
1230 /* Handle complex constants. */
1231 part = gen_complex_constant_part (mode, x, 1);
1232 if (part != NULL_RTX)
1233 return part;
1235 if (WORDS_BIG_ENDIAN)
1236 return gen_lowpart (mode, x);
1237 else if (! WORDS_BIG_ENDIAN
1238 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
1239 && REG_P (x)
1240 && REGNO (x) < FIRST_PSEUDO_REGISTER)
1241 internal_error
1242 ("can't access imaginary part of complex value in hard register");
1243 else
1244 return gen_highpart (mode, x);
1248 gen_highpart (enum machine_mode mode, rtx x)
1250 unsigned int msize = GET_MODE_SIZE (mode);
1251 rtx result;
1253 /* This case loses if X is a subreg. To catch bugs early,
1254 complain if an invalid MODE is used even in other cases. */
1255 gcc_assert (msize <= UNITS_PER_WORD
1256 || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));
1258 result = simplify_gen_subreg (mode, x, GET_MODE (x),
1259 subreg_highpart_offset (mode, GET_MODE (x)));
1260 gcc_assert (result);
1262 /* simplify_gen_subreg is not guaranteed to return a valid operand for
1263 the target if we have a MEM. gen_highpart must return a valid operand,
1264 emitting code if necessary to do so. */
1265 if (MEM_P (result))
1267 result = validize_mem (result);
1268 gcc_assert (result);
1271 return result;
1274 /* Like gen_highpart, but accept mode of EXP operand in case EXP can
1275 be VOIDmode constant. */
1277 gen_highpart_mode (enum machine_mode outermode, enum machine_mode innermode, rtx exp)
1279 if (GET_MODE (exp) != VOIDmode)
1281 gcc_assert (GET_MODE (exp) == innermode);
1282 return gen_highpart (outermode, exp);
1284 return simplify_gen_subreg (outermode, exp, innermode,
1285 subreg_highpart_offset (outermode, innermode));
1288 /* Return offset in bytes to get OUTERMODE low part
1289 of the value in mode INNERMODE stored in memory in target format. */
1291 unsigned int
1292 subreg_lowpart_offset (enum machine_mode outermode, enum machine_mode innermode)
1294 unsigned int offset = 0;
1295 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1297 if (difference > 0)
1299 if (WORDS_BIG_ENDIAN)
1300 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1301 if (BYTES_BIG_ENDIAN)
1302 offset += difference % UNITS_PER_WORD;
1305 return offset;
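/* Editorial examples, not part of the original file.  On a little-endian
   target both adjustments are zero, so subreg_lowpart_offset (SImode, DImode)
   is 0.  On a BYTES_BIG_ENDIAN && WORDS_BIG_ENDIAN target with
   UNITS_PER_WORD == 4 it is (4/4)*4 + 4%4 = 4, and
   subreg_lowpart_offset (QImode, SImode) is (3/4)*4 + 3%4 = 3.  */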
1308 /* Return offset in bytes to get OUTERMODE high part
1309 of the value in mode INNERMODE stored in memory in target format. */
1310 unsigned int
1311 subreg_highpart_offset (enum machine_mode outermode, enum machine_mode innermode)
1313 unsigned int offset = 0;
1314 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1316 gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));
1318 if (difference > 0)
1320 if (! WORDS_BIG_ENDIAN)
1321 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1322 if (! BYTES_BIG_ENDIAN)
1323 offset += difference % UNITS_PER_WORD;
1326 return offset;
1329 /* Return 1 iff X, assumed to be a SUBREG,
1330 refers to the least significant part of its containing reg.
1331 If X is not a SUBREG, always return 1 (it is its own low part!). */
1334 subreg_lowpart_p (rtx x)
1336 if (GET_CODE (x) != SUBREG)
1337 return 1;
1338 else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
1339 return 0;
1341 return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
1342 == SUBREG_BYTE (x));
1345 /* Return subword OFFSET of operand OP.
1346 The word number, OFFSET, is interpreted as the word number starting
1347 at the low-order address. OFFSET 0 is the low-order word if not
1348 WORDS_BIG_ENDIAN, otherwise it is the high-order word.
1350 If we cannot extract the required word, we return zero. Otherwise,
1351 an rtx corresponding to the requested word will be returned.
1353 VALIDATE_ADDRESS is nonzero if the address should be validated. Before
1354 reload has completed, a valid address will always be returned. After
1355 reload, if a valid address cannot be returned, we return zero.
1357 If VALIDATE_ADDRESS is zero, we simply form the required address; validating
1358 it is the responsibility of the caller.
1360 MODE is the mode of OP in case it is a CONST_INT.
1362 ??? This is still rather broken for some cases. The problem for the
1363 moment is that all callers of this thing provide no 'goal mode' to
1364 tell us to work with. This exists because all callers were written
1365 in a word based SUBREG world.
1366 Now use of this function can be deprecated by simplify_subreg in most
1367 cases.
1371 operand_subword (rtx op, unsigned int offset, int validate_address, enum machine_mode mode)
1373 if (mode == VOIDmode)
1374 mode = GET_MODE (op);
1376 gcc_assert (mode != VOIDmode);
1378 /* If OP is narrower than a word, fail. */
1379 if (mode != BLKmode
1380 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
1381 return 0;
1383 /* If we want a word outside OP, return zero. */
1384 if (mode != BLKmode
1385 && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
1386 return const0_rtx;
1388 /* Form a new MEM at the requested address. */
1389 if (MEM_P (op))
1391 rtx new = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);
1393 if (! validate_address)
1394 return new;
1396 else if (reload_completed)
1398 if (! strict_memory_address_p (word_mode, XEXP (new, 0)))
1399 return 0;
1401 else
1402 return replace_equiv_address (new, XEXP (new, 0));
1405 /* Rest can be handled by simplify_subreg. */
1406 return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
1409 /* Similar to `operand_subword', but never return 0. If we can't extract
1410 the required subword, put OP into a register and try again. If that fails,
1411 abort. We always validate the address in this case.
1413 MODE is the mode of OP, in case it is CONST_INT. */
1416 operand_subword_force (rtx op, unsigned int offset, enum machine_mode mode)
1418 rtx result = operand_subword (op, offset, 1, mode);
1420 if (result)
1421 return result;
1423 if (mode != BLKmode && mode != VOIDmode)
1425 /* If this is a register which can not be accessed by words, copy it
1426 to a pseudo register. */
1427 if (REG_P (op))
1428 op = copy_to_reg (op);
1429 else
1430 op = force_reg (mode, op);
1433 result = operand_subword (op, offset, 1, mode);
1434 gcc_assert (result);
1436 return result;
1439 /* Given a compare instruction, swap the operands.
1440 A test instruction is changed into a compare of 0 against the operand. */
1442 void
1443 reverse_comparison (rtx insn)
1445 rtx body = PATTERN (insn);
1446 rtx comp;
1448 if (GET_CODE (body) == SET)
1449 comp = SET_SRC (body);
1450 else
1451 comp = SET_SRC (XVECEXP (body, 0, 0));
1453 if (GET_CODE (comp) == COMPARE)
1455 rtx op0 = XEXP (comp, 0);
1456 rtx op1 = XEXP (comp, 1);
1457 XEXP (comp, 0) = op1;
1458 XEXP (comp, 1) = op0;
1460 else
1462 rtx new = gen_rtx_COMPARE (VOIDmode,
1463 CONST0_RTX (GET_MODE (comp)), comp);
1464 if (GET_CODE (body) == SET)
1465 SET_SRC (body) = new;
1466 else
1467 SET_SRC (XVECEXP (body, 0, 0)) = new;
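/* Editorial examples, not part of the original file (operand names are only
   illustrative):

     (set (cc0) (compare (reg:SI a) (reg:SI b)))
   becomes
     (set (cc0) (compare (reg:SI b) (reg:SI a)))

   while a test such as (set (cc0) (reg:SI a)) becomes
     (set (cc0) (compare (const_int 0) (reg:SI a)))

   where the zero is CONST0_RTX of the operand's mode.  */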
1471 /* Within a MEM_EXPR, we care about either (1) a component ref of a decl,
1472 or (2) a component ref of something variable.  Represent the latter with
1473 a NULL expression. */
1475 static tree
1476 component_ref_for_mem_expr (tree ref)
1478 tree inner = TREE_OPERAND (ref, 0);
1480 if (TREE_CODE (inner) == COMPONENT_REF)
1481 inner = component_ref_for_mem_expr (inner);
1482 else
1484 /* Now remove any conversions: they don't change what the underlying
1485 object is. Likewise for SAVE_EXPR. */
1486 while (TREE_CODE (inner) == NOP_EXPR || TREE_CODE (inner) == CONVERT_EXPR
1487 || TREE_CODE (inner) == NON_LVALUE_EXPR
1488 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1489 || TREE_CODE (inner) == SAVE_EXPR)
1490 inner = TREE_OPERAND (inner, 0);
1492 if (! DECL_P (inner))
1493 inner = NULL_TREE;
1496 if (inner == TREE_OPERAND (ref, 0))
1497 return ref;
1498 else
1499 return build3 (COMPONENT_REF, TREE_TYPE (ref), inner,
1500 TREE_OPERAND (ref, 1), NULL_TREE);
1503 /* Returns 1 if the two MEM_EXPRs can be considered equal
1504 and 0 otherwise. */
1507 mem_expr_equal_p (tree expr1, tree expr2)
1509 if (expr1 == expr2)
1510 return 1;
1512 if (! expr1 || ! expr2)
1513 return 0;
1515 if (TREE_CODE (expr1) != TREE_CODE (expr2))
1516 return 0;
1518 if (TREE_CODE (expr1) == COMPONENT_REF)
1519 return
1520 mem_expr_equal_p (TREE_OPERAND (expr1, 0),
1521 TREE_OPERAND (expr2, 0))
1522 && mem_expr_equal_p (TREE_OPERAND (expr1, 1), /* field decl */
1523 TREE_OPERAND (expr2, 1));
1525 if (INDIRECT_REF_P (expr1))
1526 return mem_expr_equal_p (TREE_OPERAND (expr1, 0),
1527 TREE_OPERAND (expr2, 0));
1529 /* ARRAY_REFs, ARRAY_RANGE_REFs and BIT_FIELD_REFs should already
1530 have been resolved here. */
1531 gcc_assert (DECL_P (expr1));
1533 /* Decls with different pointers can't be equal. */
1534 return 0;
1537 /* Given REF, a MEM, and T, either the type of REF or the expression
1538 corresponding to REF, set the memory attributes. OBJECTP is nonzero
1539 if we are making a new object of this type. BITPOS is nonzero if
1540 there is an offset outstanding on T that will be applied later. */
1542 void
1543 set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
1544 HOST_WIDE_INT bitpos)
1546 HOST_WIDE_INT alias = MEM_ALIAS_SET (ref);
1547 tree expr = MEM_EXPR (ref);
1548 rtx offset = MEM_OFFSET (ref);
1549 rtx size = MEM_SIZE (ref);
1550 unsigned int align = MEM_ALIGN (ref);
1551 HOST_WIDE_INT apply_bitpos = 0;
1552 tree type;
1554 /* It can happen that type_for_mode was given a mode for which there
1555 is no language-level type. In which case it returns NULL, which
1556 we can see here. */
1557 if (t == NULL_TREE)
1558 return;
1560 type = TYPE_P (t) ? t : TREE_TYPE (t);
1561 if (type == error_mark_node)
1562 return;
1564 /* If we have already set DECL_RTL = ref, get_alias_set will get the
1565 wrong answer, as it assumes that DECL_RTL already has the right alias
1566 info. Callers should not set DECL_RTL until after the call to
1567 set_mem_attributes. */
1568 gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));
1570 /* Get the alias set from the expression or type (perhaps using a
1571 front-end routine) and use it. */
1572 alias = get_alias_set (t);
1574 MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
1575 MEM_IN_STRUCT_P (ref) = AGGREGATE_TYPE_P (type);
1576 MEM_POINTER (ref) = POINTER_TYPE_P (type);
1577 MEM_NOTRAP_P (ref) = TREE_THIS_NOTRAP (t);
1579 /* If we are making an object of this type, or if this is a DECL, we know
1580 that it is a scalar if the type is not an aggregate. */
1581 if ((objectp || DECL_P (t)) && ! AGGREGATE_TYPE_P (type))
1582 MEM_SCALAR_P (ref) = 1;
1584 /* We can set the alignment from the type if we are making an object,
1585 this is an INDIRECT_REF, or if TYPE_ALIGN_OK. */
1586 if (objectp || TREE_CODE (t) == INDIRECT_REF
1587 || TREE_CODE (t) == ALIGN_INDIRECT_REF
1588 || TYPE_ALIGN_OK (type))
1589 align = MAX (align, TYPE_ALIGN (type));
1590 else
1591 if (TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
1593 if (integer_zerop (TREE_OPERAND (t, 1)))
1594 /* We don't know anything about the alignment. */
1595 align = BITS_PER_UNIT;
1596 else
1597 align = tree_low_cst (TREE_OPERAND (t, 1), 1);
1600 /* If the size is known, we can set that. */
1601 if (TYPE_SIZE_UNIT (type) && host_integerp (TYPE_SIZE_UNIT (type), 1))
1602 size = GEN_INT (tree_low_cst (TYPE_SIZE_UNIT (type), 1));
1604 /* If T is not a type, we may be able to deduce some more information about
1605 the expression. */
1606 if (! TYPE_P (t))
1608 tree base = get_base_address (t);
1609 if (base && DECL_P (base)
1610 && TREE_READONLY (base)
1611 && (TREE_STATIC (base) || DECL_EXTERNAL (base)))
1612 MEM_READONLY_P (ref) = 1;
1614 if (TREE_THIS_VOLATILE (t))
1615 MEM_VOLATILE_P (ref) = 1;
1617 /* Now remove any conversions: they don't change what the underlying
1618 object is. Likewise for SAVE_EXPR. */
1619 while (TREE_CODE (t) == NOP_EXPR || TREE_CODE (t) == CONVERT_EXPR
1620 || TREE_CODE (t) == NON_LVALUE_EXPR
1621 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1622 || TREE_CODE (t) == SAVE_EXPR)
1623 t = TREE_OPERAND (t, 0);
1625 /* If this expression can't be addressed (e.g., it contains a reference
1626 to a non-addressable field), show we don't change its alias set. */
1627 if (! can_address_p (t))
1628 MEM_KEEP_ALIAS_SET_P (ref) = 1;
1630 /* If this is a decl, set the attributes of the MEM from it. */
1631 if (DECL_P (t))
1633 expr = t;
1634 offset = const0_rtx;
1635 apply_bitpos = bitpos;
1636 size = (DECL_SIZE_UNIT (t)
1637 && host_integerp (DECL_SIZE_UNIT (t), 1)
1638 ? GEN_INT (tree_low_cst (DECL_SIZE_UNIT (t), 1)) : 0);
1639 align = DECL_ALIGN (t);
1642 /* If this is a constant, we know the alignment. */
1643 else if (CONSTANT_CLASS_P (t))
1645 align = TYPE_ALIGN (type);
1646 #ifdef CONSTANT_ALIGNMENT
1647 align = CONSTANT_ALIGNMENT (t, align);
1648 #endif
1651 /* If this is a field reference and not a bit-field, record it. */
1652 /* ??? There is some information that can be gleaned from bit-fields,
1653 such as the word offset in the structure that might be modified.
1654 But skip it for now. */
1655 else if (TREE_CODE (t) == COMPONENT_REF
1656 && ! DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
1658 expr = component_ref_for_mem_expr (t);
1659 offset = const0_rtx;
1660 apply_bitpos = bitpos;
1661 /* ??? Any reason the field size would be different than
1662 the size we got from the type? */
1665 /* If this is an array reference, look for an outer field reference. */
1666 else if (TREE_CODE (t) == ARRAY_REF)
1668 tree off_tree = size_zero_node;
1669 /* We can't modify t, because we use it at the end of the
1670 function. */
1671 tree t2 = t;
1675 tree index = TREE_OPERAND (t2, 1);
1676 tree low_bound = array_ref_low_bound (t2);
1677 tree unit_size = array_ref_element_size (t2);
1679 /* We assume all arrays have sizes that are a multiple of a byte.
1680 First subtract the lower bound, if any, in the type of the
1681 index, then convert to sizetype and multiply by the size of
1682 the array element. */
1683 if (! integer_zerop (low_bound))
1684 index = fold (build2 (MINUS_EXPR, TREE_TYPE (index),
1685 index, low_bound));
1687 off_tree = size_binop (PLUS_EXPR,
1688 size_binop (MULT_EXPR, convert (sizetype,
1689 index),
1690 unit_size),
1691 off_tree);
1692 t2 = TREE_OPERAND (t2, 0);
1694 while (TREE_CODE (t2) == ARRAY_REF);
1696 if (DECL_P (t2))
1698 expr = t2;
1699 offset = NULL;
1700 if (host_integerp (off_tree, 1))
1702 HOST_WIDE_INT ioff = tree_low_cst (off_tree, 1);
1703 HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
1704 align = DECL_ALIGN (t2);
1705 if (aoff && (unsigned HOST_WIDE_INT) aoff < align)
1706 align = aoff;
1707 offset = GEN_INT (ioff);
1708 apply_bitpos = bitpos;
1711 else if (TREE_CODE (t2) == COMPONENT_REF)
1713 expr = component_ref_for_mem_expr (t2);
1714 if (host_integerp (off_tree, 1))
1716 offset = GEN_INT (tree_low_cst (off_tree, 1));
1717 apply_bitpos = bitpos;
1719 /* ??? Any reason the field size would be different than
1720 the size we got from the type? */
1722 else if (flag_argument_noalias > 1
1723 && (INDIRECT_REF_P (t2))
1724 && TREE_CODE (TREE_OPERAND (t2, 0)) == PARM_DECL)
1726 expr = t2;
1727 offset = NULL;
1731 /* If this is a Fortran indirect argument reference, record the
1732 parameter decl. */
1733 else if (flag_argument_noalias > 1
1734 && (INDIRECT_REF_P (t))
1735 && TREE_CODE (TREE_OPERAND (t, 0)) == PARM_DECL)
1737 expr = t;
1738 offset = NULL;
1742 /* If we modified OFFSET based on T, then subtract the outstanding
1743 bit position offset. Similarly, increase the size of the accessed
1744 object to contain the negative offset. */
1745 if (apply_bitpos)
1747 offset = plus_constant (offset, -(apply_bitpos / BITS_PER_UNIT));
1748 if (size)
1749 size = plus_constant (size, apply_bitpos / BITS_PER_UNIT);
1752 if (TREE_CODE (t) == ALIGN_INDIRECT_REF)
1754 /* Force EXPR and OFFSET to NULL, since we don't know exactly what
1755 we're overlapping. */
1756 offset = NULL;
1757 expr = NULL;
1760 /* Now set the attributes we computed above. */
1761 MEM_ATTRS (ref)
1762 = get_mem_attrs (alias, expr, offset, size, align, GET_MODE (ref));
1764 /* If this is already known to be a scalar or aggregate, we are done. */
1765 if (MEM_IN_STRUCT_P (ref) || MEM_SCALAR_P (ref))
1766 return;
1768 /* If it is a reference into an aggregate, this is part of an aggregate.
1769 Otherwise we don't know. */
1770 else if (TREE_CODE (t) == COMPONENT_REF || TREE_CODE (t) == ARRAY_REF
1771 || TREE_CODE (t) == ARRAY_RANGE_REF
1772 || TREE_CODE (t) == BIT_FIELD_REF)
1773 MEM_IN_STRUCT_P (ref) = 1;
1776 void
1777 set_mem_attributes (rtx ref, tree t, int objectp)
1779 set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
1782 /* Set the memory attributes of MEM from REG.  */
1784 void
1785 set_mem_attrs_from_reg (rtx mem, rtx reg)
1787 MEM_ATTRS (mem)
1788 = get_mem_attrs (MEM_ALIAS_SET (mem), REG_EXPR (reg),
1789 GEN_INT (REG_OFFSET (reg)),
1790 MEM_SIZE (mem), MEM_ALIGN (mem), GET_MODE (mem));
1793 /* Set the alias set of MEM to SET. */
1795 void
1796 set_mem_alias_set (rtx mem, HOST_WIDE_INT set)
1798 #ifdef ENABLE_CHECKING
1799 /* If the new and old alias sets don't conflict, something is wrong. */
1800 gcc_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
1801 #endif
1803 MEM_ATTRS (mem) = get_mem_attrs (set, MEM_EXPR (mem), MEM_OFFSET (mem),
1804 MEM_SIZE (mem), MEM_ALIGN (mem),
1805 GET_MODE (mem));
1808 /* Set the alignment of MEM to ALIGN bits. */
1810 void
1811 set_mem_align (rtx mem, unsigned int align)
1813 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1814 MEM_OFFSET (mem), MEM_SIZE (mem), align,
1815 GET_MODE (mem));
1818 /* Set the expr for MEM to EXPR. */
1820 void
1821 set_mem_expr (rtx mem, tree expr)
1823 MEM_ATTRS (mem)
1824 = get_mem_attrs (MEM_ALIAS_SET (mem), expr, MEM_OFFSET (mem),
1825 MEM_SIZE (mem), MEM_ALIGN (mem), GET_MODE (mem));
1828 /* Set the offset of MEM to OFFSET. */
1830 void
1831 set_mem_offset (rtx mem, rtx offset)
1833 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1834 offset, MEM_SIZE (mem), MEM_ALIGN (mem),
1835 GET_MODE (mem));
1838 /* Set the size of MEM to SIZE. */
1840 void
1841 set_mem_size (rtx mem, rtx size)
1843 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1844 MEM_OFFSET (mem), size, MEM_ALIGN (mem),
1845 GET_MODE (mem));
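/* Illustrative sketch of the setters above, assuming MEM is a MEM rtx the
   caller has just synthesized and whose attributes are only partly known.
   The mode and values used here are hypothetical:

     set_mem_align (mem, GET_MODE_ALIGNMENT (SImode));
     set_mem_size (mem, GEN_INT (GET_MODE_SIZE (SImode)));  */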
1848 /* Return a memory reference like MEMREF, but with its mode changed to MODE
1849 and its address changed to ADDR. (VOIDmode means don't change the mode.
1850 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
1851 returned memory location is required to be valid. The memory
1852 attributes are not changed. */
1854 static rtx
1855 change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate)
1857 rtx new;
1859 gcc_assert (MEM_P (memref));
1860 if (mode == VOIDmode)
1861 mode = GET_MODE (memref);
1862 if (addr == 0)
1863 addr = XEXP (memref, 0);
1864 if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
1865 && (!validate || memory_address_p (mode, addr)))
1866 return memref;
1868 if (validate)
1870 if (reload_in_progress || reload_completed)
1871 gcc_assert (memory_address_p (mode, addr));
1872 else
1873 addr = memory_address (mode, addr);
1876 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
1877 return memref;
1879 new = gen_rtx_MEM (mode, addr);
1880 MEM_COPY_ATTRIBUTES (new, memref);
1881 return new;
1884 /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
1885 way we are changing MEMREF, so we only preserve the alias set. */
1888 change_address (rtx memref, enum machine_mode mode, rtx addr)
1890 rtx new = change_address_1 (memref, mode, addr, 1), size;
1891 enum machine_mode mmode = GET_MODE (new);
1892 unsigned int align;
1894 size = mmode == BLKmode ? 0 : GEN_INT (GET_MODE_SIZE (mmode));
1895 align = mmode == BLKmode ? BITS_PER_UNIT : GET_MODE_ALIGNMENT (mmode);
1897 /* If there are no changes, just return the original memory reference. */
1898 if (new == memref)
1900 if (MEM_ATTRS (memref) == 0
1901 || (MEM_EXPR (memref) == NULL
1902 && MEM_OFFSET (memref) == NULL
1903 && MEM_SIZE (memref) == size
1904 && MEM_ALIGN (memref) == align))
1905 return new;
1907 new = gen_rtx_MEM (mmode, XEXP (memref, 0));
1908 MEM_COPY_ATTRIBUTES (new, memref);
1911 MEM_ATTRS (new)
1912 = get_mem_attrs (MEM_ALIAS_SET (memref), 0, 0, size, align, mmode);
1914 return new;
1917 /* Return a memory reference like MEMREF, but with its mode changed
1918 to MODE and its address offset by OFFSET bytes. If VALIDATE is
1919 nonzero, the memory address is forced to be valid.
1920 If ADJUST is zero, OFFSET is only used to update MEM_ATTRS
1921 and caller is responsible for adjusting MEMREF base register. */
1924 adjust_address_1 (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset,
1925 int validate, int adjust)
1927 rtx addr = XEXP (memref, 0);
1928 rtx new;
1929 rtx memoffset = MEM_OFFSET (memref);
1930 rtx size = 0;
1931 unsigned int memalign = MEM_ALIGN (memref);
1933 /* If there are no changes, just return the original memory reference. */
1934 if (mode == GET_MODE (memref) && !offset
1935 && (!validate || memory_address_p (mode, addr)))
1936 return memref;
1938 /* ??? Prefer to create garbage instead of creating shared rtl.
1939 This may happen even if offset is nonzero -- consider
1940 (plus (plus reg reg) const_int) -- so do this always. */
1941 addr = copy_rtx (addr);
1943 if (adjust)
1945 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
1946 object, we can merge it into the LO_SUM. */
1947 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
1948 && offset >= 0
1949 && (unsigned HOST_WIDE_INT) offset
1950 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
1951 addr = gen_rtx_LO_SUM (Pmode, XEXP (addr, 0),
1952 plus_constant (XEXP (addr, 1), offset));
1953 else
1954 addr = plus_constant (addr, offset);
1957 new = change_address_1 (memref, mode, addr, validate);
1959 /* Compute the new values of the memory attributes due to this adjustment.
1960 We add the offsets and update the alignment. */
1961 if (memoffset)
1962 memoffset = GEN_INT (offset + INTVAL (memoffset));
1964 /* Compute the new alignment by taking the MIN of the alignment and the
1965 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
1966 is zero. */
1967 if (offset != 0)
1968 memalign
1969 = MIN (memalign,
1970 (unsigned HOST_WIDE_INT) (offset & -offset) * BITS_PER_UNIT);
1972 /* We can compute the size in a number of ways. */
1973 if (GET_MODE (new) != BLKmode)
1974 size = GEN_INT (GET_MODE_SIZE (GET_MODE (new)));
1975 else if (MEM_SIZE (memref))
1976 size = plus_constant (MEM_SIZE (memref), -offset);
1978 MEM_ATTRS (new) = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref),
1979 memoffset, size, memalign, GET_MODE (new));
1981 /* At some point, we should validate that this offset is within the object,
1982 if all the appropriate values are known. */
1983 return new;
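/* Callers normally reach adjust_address_1 through the adjust_address and
   adjust_address_nv macros (validated and non-validated forms).  A sketch,
   with MEM a hypothetical DImode memory reference being split into two
   word-sized pieces at byte offsets 0 and 4:

     rtx word0 = adjust_address (mem, SImode, 0);
     rtx word1 = adjust_address (mem, SImode, 4);  */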
1986 /* Return a memory reference like MEMREF, but with its mode changed
1987 to MODE and its address changed to ADDR, which is assumed to be
1988 MEMREF offset by OFFSET bytes. If VALIDATE is
1989 nonzero, the memory address is forced to be valid. */
1992 adjust_automodify_address_1 (rtx memref, enum machine_mode mode, rtx addr,
1993 HOST_WIDE_INT offset, int validate)
1995 memref = change_address_1 (memref, VOIDmode, addr, validate);
1996 return adjust_address_1 (memref, mode, offset, validate, 0);
1999 /* Return a memory reference like MEMREF, but whose address is changed by
2000 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2001 known to be in OFFSET (possibly 1). */
2004 offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
2006 rtx new, addr = XEXP (memref, 0);
2008 new = simplify_gen_binary (PLUS, Pmode, addr, offset);
2010 /* At this point we don't know _why_ the address is invalid. It
2011 could have secondary memory references, multiplies or anything.
2013 However, if we did go and rearrange things, we can wind up not
2014 being able to recognize the magic around pic_offset_table_rtx.
2015 This stuff is fragile, and is yet another example of why it is
2016 bad to expose PIC machinery too early. */
2017 if (! memory_address_p (GET_MODE (memref), new)
2018 && GET_CODE (addr) == PLUS
2019 && XEXP (addr, 0) == pic_offset_table_rtx)
2021 addr = force_reg (GET_MODE (addr), addr);
2022 new = simplify_gen_binary (PLUS, Pmode, addr, offset);
2025 update_temp_slot_address (XEXP (memref, 0), new);
2026 new = change_address_1 (memref, VOIDmode, new, 1);
2028 /* If there are no changes, just return the original memory reference. */
2029 if (new == memref)
2030 return new;
2032 /* Update the alignment to reflect the offset. Reset the offset, which
2033 we don't know. */
2034 MEM_ATTRS (new)
2035 = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref), 0, 0,
2036 MIN (MEM_ALIGN (memref), pow2 * BITS_PER_UNIT),
2037 GET_MODE (new));
2038 return new;
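/* A sketch of offset_address: indexing into a BLKmode array reference
   with a run-time offset held in a register.  ARRAY_MEM and INDEX_REG are
   hypothetical; the final argument says the offset is known to be a
   multiple of 4 bytes, so the recorded alignment is only reduced to that.

     rtx elt = offset_address (array_mem, index_reg, 4);  */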
2041 /* Return a memory reference like MEMREF, but with its address changed to
2042 ADDR. The caller is asserting that the actual piece of memory pointed
2043 to is the same, just the form of the address is being changed, such as
2044 by putting something into a register. */
2047 replace_equiv_address (rtx memref, rtx addr)
2049 /* change_address_1 copies the memory attribute structure without change
2050 and that's exactly what we want here. */
2051 update_temp_slot_address (XEXP (memref, 0), addr);
2052 return change_address_1 (memref, VOIDmode, addr, 1);
2055 /* Likewise, but the reference is not required to be valid. */
2058 replace_equiv_address_nv (rtx memref, rtx addr)
2060 return change_address_1 (memref, VOIDmode, addr, 0);
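/* Typical use of replace_equiv_address: the MEM still refers to the same
   piece of memory, only the form of the address changes, e.g. after
   copying it into a register to satisfy an addressing-mode constraint.
   MEM here is a hypothetical memory reference:

     rtx addr = force_reg (Pmode, XEXP (mem, 0));
     mem = replace_equiv_address (mem, addr);  */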
2063 /* Return a memory reference like MEMREF, but with its mode widened to
2064 MODE and offset by OFFSET. This would be used by targets that e.g.
2065 cannot issue QImode memory operations and have to use SImode memory
2066 operations plus masking logic. */
2069 widen_memory_access (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset)
2071 rtx new = adjust_address_1 (memref, mode, offset, 1, 1);
2072 tree expr = MEM_EXPR (new);
2073 rtx memoffset = MEM_OFFSET (new);
2074 unsigned int size = GET_MODE_SIZE (mode);
2076 /* If there are no changes, just return the original memory reference. */
2077 if (new == memref)
2078 return new;
2080 /* If we don't know what offset we were at within the expression, then
2081 we can't know if we've overstepped the bounds. */
2082 if (! memoffset)
2083 expr = NULL_TREE;
2085 while (expr)
2087 if (TREE_CODE (expr) == COMPONENT_REF)
2089 tree field = TREE_OPERAND (expr, 1);
2090 tree offset = component_ref_field_offset (expr);
2092 if (! DECL_SIZE_UNIT (field))
2094 expr = NULL_TREE;
2095 break;
2098 /* Is the field at least as large as the access? If so, ok,
2099 otherwise strip back to the containing structure. */
2100 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2101 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
2102 && INTVAL (memoffset) >= 0)
2103 break;
2105 if (! host_integerp (offset, 1))
2107 expr = NULL_TREE;
2108 break;
2111 expr = TREE_OPERAND (expr, 0);
2112 memoffset
2113 = (GEN_INT (INTVAL (memoffset)
2114 + tree_low_cst (offset, 1)
2115 + (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
2116 / BITS_PER_UNIT)));
2118 /* Similarly for the decl. */
2119 else if (DECL_P (expr)
2120 && DECL_SIZE_UNIT (expr)
2121 && TREE_CODE (DECL_SIZE_UNIT (expr)) == INTEGER_CST
2122 && compare_tree_int (DECL_SIZE_UNIT (expr), size) >= 0
2123 && (! memoffset || INTVAL (memoffset) >= 0))
2124 break;
2125 else
2127 /* The widened memory access overflows the expression, which means
2128 that it could alias another expression. Zap it. */
2129 expr = NULL_TREE;
2130 break;
2134 if (! expr)
2135 memoffset = NULL_RTX;
2137 /* The widened memory may alias other stuff, so zap the alias set. */
2138 /* ??? Maybe use get_alias_set on any remaining expression. */
2140 MEM_ATTRS (new) = get_mem_attrs (0, expr, memoffset, GEN_INT (size),
2141 MEM_ALIGN (new), mode);
2143 return new;
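/* A sketch of widen_memory_access for a target that cannot do byte loads:
   BYTE_MEM is a hypothetical QImode reference that is re-read as the
   SImode word containing it, to be masked and shifted by the caller.

     rtx word = widen_memory_access (byte_mem, SImode, 0);  */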
2146 /* Return a newly created CODE_LABEL rtx with a unique label number. */
2149 gen_label_rtx (void)
2151 return gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX, NULL_RTX,
2152 NULL, label_num++, NULL);
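/* A sketch of the usual label protocol; the branch emission itself is
   elided here since it is target-specific:

     rtx label = gen_label_rtx ();
     ... emit a (conditional) branch to LABEL ...
     emit_label (label);  */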
2155 /* For procedure integration. */
2157 /* Install new pointers to the first and last insns in the chain.
2158 Also, set cur_insn_uid to one higher than the last in use.
2159 Used for an inline-procedure after copying the insn chain. */
2161 void
2162 set_new_first_and_last_insn (rtx first, rtx last)
2164 rtx insn;
2166 first_insn = first;
2167 last_insn = last;
2168 cur_insn_uid = 0;
2170 for (insn = first; insn; insn = NEXT_INSN (insn))
2171 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2173 cur_insn_uid++;
2176 /* Go through all the RTL insn bodies and copy any invalid shared
2177 structure. This routine should only be called once. */
2179 static void
2180 unshare_all_rtl_1 (tree fndecl, rtx insn)
2182 tree decl;
2184 /* Make sure that virtual parameters are not shared. */
2185 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
2186 SET_DECL_RTL (decl, copy_rtx_if_shared (DECL_RTL (decl)));
2188 /* Make sure that virtual stack slots are not shared. */
2189 unshare_all_decls (DECL_INITIAL (fndecl));
2191 /* Unshare just about everything else. */
2192 unshare_all_rtl_in_chain (insn);
2194 /* Make sure the addresses of stack slots found outside the insn chain
2195 (such as, in DECL_RTL of a variable) are not shared
2196 with the insn chain.
2198 This special care is necessary when the stack slot MEM does not
2199 actually appear in the insn chain. If it does appear, its address
2200 is unshared from all else at that point. */
2201 stack_slot_list = copy_rtx_if_shared (stack_slot_list);
2204 /* Go through all the RTL insn bodies and copy any invalid shared
2205 structure, again. This is a fairly expensive thing to do so it
2206 should be done sparingly. */
2208 void
2209 unshare_all_rtl_again (rtx insn)
2211 rtx p;
2212 tree decl;
2214 for (p = insn; p; p = NEXT_INSN (p))
2215 if (INSN_P (p))
2217 reset_used_flags (PATTERN (p));
2218 reset_used_flags (REG_NOTES (p));
2219 reset_used_flags (LOG_LINKS (p));
2222 /* Make sure that virtual stack slots are not shared. */
2223 reset_used_decls (DECL_INITIAL (cfun->decl));
2225 /* Make sure that virtual parameters are not shared. */
2226 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = TREE_CHAIN (decl))
2227 reset_used_flags (DECL_RTL (decl));
2229 reset_used_flags (stack_slot_list);
2231 unshare_all_rtl_1 (cfun->decl, insn);
2234 void
2235 unshare_all_rtl (void)
2237 unshare_all_rtl_1 (current_function_decl, get_insns ());
2240 /* Check that ORIG is not marked when it should not be, and mark ORIG as in use.
2241 Recursively does the same for subexpressions. */
2243 static void
2244 verify_rtx_sharing (rtx orig, rtx insn)
2246 rtx x = orig;
2247 int i;
2248 enum rtx_code code;
2249 const char *format_ptr;
2251 if (x == 0)
2252 return;
2254 code = GET_CODE (x);
2256 /* These types may be freely shared. */
2258 switch (code)
2260 case REG:
2261 case CONST_INT:
2262 case CONST_DOUBLE:
2263 case CONST_VECTOR:
2264 case SYMBOL_REF:
2265 case LABEL_REF:
2266 case CODE_LABEL:
2267 case PC:
2268 case CC0:
2269 case SCRATCH:
2270 return;
2271 /* A SCRATCH must be shared because each one represents a distinct value. */
2272 case CLOBBER:
2273 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2274 return;
2275 break;
2277 case CONST:
2278 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
2279 a LABEL_REF, it isn't sharable. */
2280 if (GET_CODE (XEXP (x, 0)) == PLUS
2281 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
2282 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
2283 return;
2284 break;
2286 case MEM:
2287 /* A MEM is allowed to be shared if its address is constant. */
2288 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2289 || reload_completed || reload_in_progress)
2290 return;
2292 break;
2294 default:
2295 break;
2298 /* This rtx may not be shared. If it has already been seen,
2299 replace it with a copy of itself. */
2300 #ifdef ENABLE_CHECKING
2301 if (RTX_FLAG (x, used))
2303 error ("Invalid rtl sharing found in the insn");
2304 debug_rtx (insn);
2305 error ("Shared rtx");
2306 debug_rtx (x);
2307 internal_error ("Internal consistency failure");
2309 #endif
2310 gcc_assert (!RTX_FLAG (x, used));
2312 RTX_FLAG (x, used) = 1;
2314 /* Now scan the subexpressions recursively. */
2316 format_ptr = GET_RTX_FORMAT (code);
2318 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2320 switch (*format_ptr++)
2322 case 'e':
2323 verify_rtx_sharing (XEXP (x, i), insn);
2324 break;
2326 case 'E':
2327 if (XVEC (x, i) != NULL)
2329 int j;
2330 int len = XVECLEN (x, i);
2332 for (j = 0; j < len; j++)
2334 /* We allow sharing of ASM_OPERANDS inside a single
2335 instruction. */
2336 if (j && GET_CODE (XVECEXP (x, i, j)) == SET
2337 && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2338 == ASM_OPERANDS))
2339 verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2340 else
2341 verify_rtx_sharing (XVECEXP (x, i, j), insn);
2344 break;
2347 return;
2350 /* Go through all the RTL insn bodies and check that there is no unexpected
2351 sharing in between the subexpressions. */
2353 void
2354 verify_rtl_sharing (void)
2356 rtx p;
2358 for (p = get_insns (); p; p = NEXT_INSN (p))
2359 if (INSN_P (p))
2361 reset_used_flags (PATTERN (p));
2362 reset_used_flags (REG_NOTES (p));
2363 reset_used_flags (LOG_LINKS (p));
2366 for (p = get_insns (); p; p = NEXT_INSN (p))
2367 if (INSN_P (p))
2369 verify_rtx_sharing (PATTERN (p), p);
2370 verify_rtx_sharing (REG_NOTES (p), p);
2371 verify_rtx_sharing (LOG_LINKS (p), p);
2375 /* Go through all the RTL insn bodies and copy any invalid shared structure.
2376 Assumes the mark bits are cleared at entry. */
2378 void
2379 unshare_all_rtl_in_chain (rtx insn)
2381 for (; insn; insn = NEXT_INSN (insn))
2382 if (INSN_P (insn))
2384 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2385 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2386 LOG_LINKS (insn) = copy_rtx_if_shared (LOG_LINKS (insn));
2390 /* Go through all virtual stack slots of a function and copy any
2391 shared structure. */
2392 static void
2393 unshare_all_decls (tree blk)
2395 tree t;
2397 /* Copy shared decls. */
2398 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
2399 if (DECL_RTL_SET_P (t))
2400 SET_DECL_RTL (t, copy_rtx_if_shared (DECL_RTL (t)));
2402 /* Now process sub-blocks. */
2403 for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
2404 unshare_all_decls (t);
2407 /* Go through all virtual stack slots of a function and mark them as
2408 not shared. */
2409 static void
2410 reset_used_decls (tree blk)
2412 tree t;
2414 /* Mark decls. */
2415 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
2416 if (DECL_RTL_SET_P (t))
2417 reset_used_flags (DECL_RTL (t));
2419 /* Now process sub-blocks. */
2420 for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
2421 reset_used_decls (t);
2424 /* Mark ORIG as in use, and return a copy of it if it was already in use.
2425 Recursively does the same for subexpressions. Uses
2426 copy_rtx_if_shared_1 to reduce stack space. */
2429 copy_rtx_if_shared (rtx orig)
2431 copy_rtx_if_shared_1 (&orig);
2432 return orig;
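/* The usual unsharing protocol, sketched for a single hypothetical rtx X:
   first clear the used bits over the region of interest, then walk it so
   that only sub-rtxes seen a second time during the walk get copied.

     reset_used_flags (x);
     x = copy_rtx_if_shared (x);  */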
2435 /* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
2436 use. Recursively does the same for subexpressions. */
2438 static void
2439 copy_rtx_if_shared_1 (rtx *orig1)
2441 rtx x;
2442 int i;
2443 enum rtx_code code;
2444 rtx *last_ptr;
2445 const char *format_ptr;
2446 int copied = 0;
2447 int length;
2449 /* Repeat is used to turn tail-recursion into iteration. */
2450 repeat:
2451 x = *orig1;
2453 if (x == 0)
2454 return;
2456 code = GET_CODE (x);
2458 /* These types may be freely shared. */
2460 switch (code)
2462 case REG:
2463 case CONST_INT:
2464 case CONST_DOUBLE:
2465 case CONST_VECTOR:
2466 case SYMBOL_REF:
2467 case LABEL_REF:
2468 case CODE_LABEL:
2469 case PC:
2470 case CC0:
2471 case SCRATCH:
2472 /* A SCRATCH must be shared because each one represents a distinct value. */
2473 return;
2474 case CLOBBER:
2475 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2476 return;
2477 break;
2479 case CONST:
2480 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
2481 a LABEL_REF, it isn't sharable. */
2482 if (GET_CODE (XEXP (x, 0)) == PLUS
2483 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
2484 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
2485 return;
2486 break;
2488 case INSN:
2489 case JUMP_INSN:
2490 case CALL_INSN:
2491 case NOTE:
2492 case BARRIER:
2493 /* The chain of insns is not being copied. */
2494 return;
2496 default:
2497 break;
2500 /* This rtx may not be shared. If it has already been seen,
2501 replace it with a copy of itself. */
2503 if (RTX_FLAG (x, used))
2505 rtx copy;
2507 copy = rtx_alloc (code);
2508 memcpy (copy, x, RTX_SIZE (code));
2509 x = copy;
2510 copied = 1;
2512 RTX_FLAG (x, used) = 1;
2514 /* Now scan the subexpressions recursively.
2515 We can store any replaced subexpressions directly into X
2516 since we know X is not shared! Any vectors in X
2517 must be copied if X was copied. */
2519 format_ptr = GET_RTX_FORMAT (code);
2520 length = GET_RTX_LENGTH (code);
2521 last_ptr = NULL;
2523 for (i = 0; i < length; i++)
2525 switch (*format_ptr++)
2527 case 'e':
2528 if (last_ptr)
2529 copy_rtx_if_shared_1 (last_ptr);
2530 last_ptr = &XEXP (x, i);
2531 break;
2533 case 'E':
2534 if (XVEC (x, i) != NULL)
2536 int j;
2537 int len = XVECLEN (x, i);
2539 /* Copy the vector iff I copied the rtx and the length
2540 is nonzero. */
2541 if (copied && len > 0)
2542 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
2544 /* Call recursively on all inside the vector. */
2545 for (j = 0; j < len; j++)
2547 if (last_ptr)
2548 copy_rtx_if_shared_1 (last_ptr);
2549 last_ptr = &XVECEXP (x, i, j);
2552 break;
2555 *orig1 = x;
2556 if (last_ptr)
2558 orig1 = last_ptr;
2559 goto repeat;
2561 return;
2564 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
2565 to look for shared sub-parts. */
2567 void
2568 reset_used_flags (rtx x)
2570 int i, j;
2571 enum rtx_code code;
2572 const char *format_ptr;
2573 int length;
2575 /* Repeat is used to turn tail-recursion into iteration. */
2576 repeat:
2577 if (x == 0)
2578 return;
2580 code = GET_CODE (x);
2582 /* These types may be freely shared so we needn't do any resetting
2583 for them. */
2585 switch (code)
2587 case REG:
2588 case CONST_INT:
2589 case CONST_DOUBLE:
2590 case CONST_VECTOR:
2591 case SYMBOL_REF:
2592 case CODE_LABEL:
2593 case PC:
2594 case CC0:
2595 return;
2597 case INSN:
2598 case JUMP_INSN:
2599 case CALL_INSN:
2600 case NOTE:
2601 case LABEL_REF:
2602 case BARRIER:
2603 /* The chain of insns is not being copied. */
2604 return;
2606 default:
2607 break;
2610 RTX_FLAG (x, used) = 0;
2612 format_ptr = GET_RTX_FORMAT (code);
2613 length = GET_RTX_LENGTH (code);
2615 for (i = 0; i < length; i++)
2617 switch (*format_ptr++)
2619 case 'e':
2620 if (i == length-1)
2622 x = XEXP (x, i);
2623 goto repeat;
2625 reset_used_flags (XEXP (x, i));
2626 break;
2628 case 'E':
2629 for (j = 0; j < XVECLEN (x, i); j++)
2630 reset_used_flags (XVECEXP (x, i, j));
2631 break;
2636 /* Set all the USED bits in X to allow copy_rtx_if_shared to be used
2637 to look for shared sub-parts. */
2639 void
2640 set_used_flags (rtx x)
2642 int i, j;
2643 enum rtx_code code;
2644 const char *format_ptr;
2646 if (x == 0)
2647 return;
2649 code = GET_CODE (x);
2651 /* These types may be freely shared so we needn't do any resetting
2652 for them. */
2654 switch (code)
2656 case REG:
2657 case CONST_INT:
2658 case CONST_DOUBLE:
2659 case CONST_VECTOR:
2660 case SYMBOL_REF:
2661 case CODE_LABEL:
2662 case PC:
2663 case CC0:
2664 return;
2666 case INSN:
2667 case JUMP_INSN:
2668 case CALL_INSN:
2669 case NOTE:
2670 case LABEL_REF:
2671 case BARRIER:
2672 /* The chain of insns is not being copied. */
2673 return;
2675 default:
2676 break;
2679 RTX_FLAG (x, used) = 1;
2681 format_ptr = GET_RTX_FORMAT (code);
2682 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2684 switch (*format_ptr++)
2686 case 'e':
2687 set_used_flags (XEXP (x, i));
2688 break;
2690 case 'E':
2691 for (j = 0; j < XVECLEN (x, i); j++)
2692 set_used_flags (XVECEXP (x, i, j));
2693 break;
2698 /* Copy X if necessary so that it won't be altered by changes in OTHER.
2699 Return X or the rtx for the pseudo reg the value of X was copied into.
2700 OTHER must be valid as a SET_DEST. */
2703 make_safe_from (rtx x, rtx other)
2705 while (1)
2706 switch (GET_CODE (other))
2708 case SUBREG:
2709 other = SUBREG_REG (other);
2710 break;
2711 case STRICT_LOW_PART:
2712 case SIGN_EXTEND:
2713 case ZERO_EXTEND:
2714 other = XEXP (other, 0);
2715 break;
2716 default:
2717 goto done;
2719 done:
2720 if ((MEM_P (other)
2721 && ! CONSTANT_P (x)
2722 && !REG_P (x)
2723 && GET_CODE (x) != SUBREG)
2724 || (REG_P (other)
2725 && (REGNO (other) < FIRST_PSEUDO_REGISTER
2726 || reg_mentioned_p (other, x))))
2728 rtx temp = gen_reg_rtx (GET_MODE (x));
2729 emit_move_insn (temp, x);
2730 return temp;
2732 return x;
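/* A sketch of make_safe_from, with OP1 and TARGET hypothetical: before
   emitting code that stores into TARGET and only afterwards reads OP1,
   the caller copies OP1 out of harm's way if the two might overlap.

     op1 = make_safe_from (op1, target);  */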
2735 /* Emission of insns (adding them to the doubly-linked list). */
2737 /* Return the first insn of the current sequence or current function. */
2740 get_insns (void)
2742 return first_insn;
2745 /* Specify a new insn as the first in the chain. */
2747 void
2748 set_first_insn (rtx insn)
2750 gcc_assert (!PREV_INSN (insn));
2751 first_insn = insn;
2754 /* Return the last insn emitted in current sequence or current function. */
2757 get_last_insn (void)
2759 return last_insn;
2762 /* Specify a new insn as the last in the chain. */
2764 void
2765 set_last_insn (rtx insn)
2767 gcc_assert (!NEXT_INSN (insn));
2768 last_insn = insn;
2771 /* Return the last insn emitted, even if it is in a sequence now pushed. */
2774 get_last_insn_anywhere (void)
2776 struct sequence_stack *stack;
2777 if (last_insn)
2778 return last_insn;
2779 for (stack = seq_stack; stack; stack = stack->next)
2780 if (stack->last != 0)
2781 return stack->last;
2782 return 0;
2785 /* Return the first nonnote insn emitted in current sequence or current
2786 function. This routine looks inside SEQUENCEs. */
2789 get_first_nonnote_insn (void)
2791 rtx insn;
2793 for (insn = first_insn; insn && NOTE_P (insn); insn = next_insn (insn));
2794 return insn;
2797 /* Return the last nonnote insn emitted in current sequence or current
2798 function. This routine looks inside SEQUENCEs. */
2801 get_last_nonnote_insn (void)
2803 rtx insn;
2805 for (insn = last_insn; insn && NOTE_P (insn); insn = previous_insn (insn));
2806 return insn;
2809 /* Return a number larger than any instruction's uid in this function. */
2812 get_max_uid (void)
2814 return cur_insn_uid;
2817 /* Renumber instructions so that no instruction UIDs are wasted. */
2819 void
2820 renumber_insns (FILE *stream)
2822 rtx insn;
2824 /* If we're not supposed to renumber instructions, don't. */
2825 if (!flag_renumber_insns)
2826 return;
2828 /* If there aren't that many instructions, then it's not really
2829 worth renumbering them. */
2830 if (flag_renumber_insns == 1 && get_max_uid () < 25000)
2831 return;
2833 cur_insn_uid = 1;
2835 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2837 if (stream)
2838 fprintf (stream, "Renumbering insn %d to %d\n",
2839 INSN_UID (insn), cur_insn_uid);
2840 INSN_UID (insn) = cur_insn_uid++;
2844 /* Return the next insn. If it is a SEQUENCE, return the first insn
2845 of the sequence. */
2848 next_insn (rtx insn)
2850 if (insn)
2852 insn = NEXT_INSN (insn);
2853 if (insn && NONJUMP_INSN_P (insn)
2854 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2855 insn = XVECEXP (PATTERN (insn), 0, 0);
2858 return insn;
2861 /* Return the previous insn. If it is a SEQUENCE, return the last insn
2862 of the sequence. */
2865 previous_insn (rtx insn)
2867 if (insn)
2869 insn = PREV_INSN (insn);
2870 if (insn && NONJUMP_INSN_P (insn)
2871 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2872 insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
2875 return insn;
2878 /* Return the next insn after INSN that is not a NOTE. This routine does not
2879 look inside SEQUENCEs. */
2882 next_nonnote_insn (rtx insn)
2884 while (insn)
2886 insn = NEXT_INSN (insn);
2887 if (insn == 0 || !NOTE_P (insn))
2888 break;
2891 return insn;
2894 /* Return the previous insn before INSN that is not a NOTE. This routine does
2895 not look inside SEQUENCEs. */
2898 prev_nonnote_insn (rtx insn)
2900 while (insn)
2902 insn = PREV_INSN (insn);
2903 if (insn == 0 || !NOTE_P (insn))
2904 break;
2907 return insn;
2910 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
2911 or 0, if there is none. This routine does not look inside
2912 SEQUENCEs. */
2915 next_real_insn (rtx insn)
2917 while (insn)
2919 insn = NEXT_INSN (insn);
2920 if (insn == 0 || INSN_P (insn))
2921 break;
2924 return insn;
2927 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
2928 or 0, if there is none. This routine does not look inside
2929 SEQUENCEs. */
2932 prev_real_insn (rtx insn)
2934 while (insn)
2936 insn = PREV_INSN (insn);
2937 if (insn == 0 || INSN_P (insn))
2938 break;
2941 return insn;
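/* A sketch of the scanning helpers above: find the instruction that will
   actually be executed after a hypothetical INSN, skipping notes, labels
   and barriers, and check whether it is a jump.

     rtx next = next_real_insn (insn);
     if (next && JUMP_P (next))
       ...  */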
2944 /* Return the last CALL_INSN in the current list, or 0 if there is none.
2945 This routine does not look inside SEQUENCEs. */
2948 last_call_insn (void)
2950 rtx insn;
2952 for (insn = get_last_insn ();
2953 insn && !CALL_P (insn);
2954 insn = PREV_INSN (insn))
2957 return insn;
2960 /* Find the next insn after INSN that really does something. This routine
2961 does not look inside SEQUENCEs. Until reload has completed, this is the
2962 same as next_real_insn. */
2965 active_insn_p (rtx insn)
2967 return (CALL_P (insn) || JUMP_P (insn)
2968 || (NONJUMP_INSN_P (insn)
2969 && (! reload_completed
2970 || (GET_CODE (PATTERN (insn)) != USE
2971 && GET_CODE (PATTERN (insn)) != CLOBBER))));
2975 next_active_insn (rtx insn)
2977 while (insn)
2979 insn = NEXT_INSN (insn);
2980 if (insn == 0 || active_insn_p (insn))
2981 break;
2984 return insn;
2987 /* Find the last insn before INSN that really does something. This routine
2988 does not look inside SEQUENCEs. Until reload has completed, this is the
2989 same as prev_real_insn. */
2992 prev_active_insn (rtx insn)
2994 while (insn)
2996 insn = PREV_INSN (insn);
2997 if (insn == 0 || active_insn_p (insn))
2998 break;
3001 return insn;
3004 /* Return the next CODE_LABEL after the insn INSN, or 0 if there is none. */
3007 next_label (rtx insn)
3009 while (insn)
3011 insn = NEXT_INSN (insn);
3012 if (insn == 0 || LABEL_P (insn))
3013 break;
3016 return insn;
3019 /* Return the last CODE_LABEL before the insn INSN, or 0 if there is none. */
3022 prev_label (rtx insn)
3024 while (insn)
3026 insn = PREV_INSN (insn);
3027 if (insn == 0 || LABEL_P (insn))
3028 break;
3031 return insn;
3034 /* Return the last label to mark the same position as LABEL. Return null
3035 if LABEL itself is null. */
3038 skip_consecutive_labels (rtx label)
3040 rtx insn;
3042 for (insn = label; insn != 0 && !INSN_P (insn); insn = NEXT_INSN (insn))
3043 if (LABEL_P (insn))
3044 label = insn;
3046 return label;
3049 #ifdef HAVE_cc0
3050 /* INSN uses CC0 and is being moved into a delay slot. Set up REG_CC_SETTER
3051 and REG_CC_USER notes so we can find it. */
3053 void
3054 link_cc0_insns (rtx insn)
3056 rtx user = next_nonnote_insn (insn);
3058 if (NONJUMP_INSN_P (user) && GET_CODE (PATTERN (user)) == SEQUENCE)
3059 user = XVECEXP (PATTERN (user), 0, 0);
3061 REG_NOTES (user) = gen_rtx_INSN_LIST (REG_CC_SETTER, insn,
3062 REG_NOTES (user));
3063 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_CC_USER, user, REG_NOTES (insn));
3066 /* Return the next insn that uses CC0 after INSN, which is assumed to
3067 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3068 applied to the result of this function should yield INSN).
3070 Normally, this is simply the next insn. However, if a REG_CC_USER note
3071 is present, it contains the insn that uses CC0.
3073 Return 0 if we can't find the insn. */
3076 next_cc0_user (rtx insn)
3078 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3080 if (note)
3081 return XEXP (note, 0);
3083 insn = next_nonnote_insn (insn);
3084 if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3085 insn = XVECEXP (PATTERN (insn), 0, 0);
3087 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3088 return insn;
3090 return 0;
3093 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3094 note, it is the previous insn. */
3097 prev_cc0_setter (rtx insn)
3099 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3101 if (note)
3102 return XEXP (note, 0);
3104 insn = prev_nonnote_insn (insn);
3105 gcc_assert (sets_cc0_p (PATTERN (insn)));
3107 return insn;
3109 #endif
3111 /* Increment the label uses for all labels present in rtx. */
3113 static void
3114 mark_label_nuses (rtx x)
3116 enum rtx_code code;
3117 int i, j;
3118 const char *fmt;
3120 code = GET_CODE (x);
3121 if (code == LABEL_REF && LABEL_P (XEXP (x, 0)))
3122 LABEL_NUSES (XEXP (x, 0))++;
3124 fmt = GET_RTX_FORMAT (code);
3125 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3127 if (fmt[i] == 'e')
3128 mark_label_nuses (XEXP (x, i));
3129 else if (fmt[i] == 'E')
3130 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3131 mark_label_nuses (XVECEXP (x, i, j));
3136 /* Try splitting insns that can be split for better scheduling.
3137 PAT is the pattern which might split.
3138 TRIAL is the insn providing PAT.
3139 LAST is nonzero if we should return the last insn of the sequence produced.
3141 If this routine succeeds in splitting, it returns the first or last
3142 replacement insn depending on the value of LAST. Otherwise, it
3143 returns TRIAL. If the insn to be returned can be split, it will be. */
3146 try_split (rtx pat, rtx trial, int last)
3148 rtx before = PREV_INSN (trial);
3149 rtx after = NEXT_INSN (trial);
3150 int has_barrier = 0;
3151 rtx tem;
3152 rtx note, seq;
3153 int probability;
3154 rtx insn_last, insn;
3155 int njumps = 0;
3157 if (any_condjump_p (trial)
3158 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3159 split_branch_probability = INTVAL (XEXP (note, 0));
3160 probability = split_branch_probability;
3162 seq = split_insns (pat, trial);
3164 split_branch_probability = -1;
3166 /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
3167 We may need to handle this specially. */
3168 if (after && BARRIER_P (after))
3170 has_barrier = 1;
3171 after = NEXT_INSN (after);
3174 if (!seq)
3175 return trial;
3177 /* Avoid infinite loop if any insn of the result matches
3178 the original pattern. */
3179 insn_last = seq;
3180 while (1)
3182 if (INSN_P (insn_last)
3183 && rtx_equal_p (PATTERN (insn_last), pat))
3184 return trial;
3185 if (!NEXT_INSN (insn_last))
3186 break;
3187 insn_last = NEXT_INSN (insn_last);
3190 /* Mark labels. */
3191 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3193 if (JUMP_P (insn))
3195 mark_jump_label (PATTERN (insn), insn, 0);
3196 njumps++;
3197 if (probability != -1
3198 && any_condjump_p (insn)
3199 && !find_reg_note (insn, REG_BR_PROB, 0))
3201 /* We can preserve the REG_BR_PROB notes only if exactly
3202 one jump is created, otherwise the machine description
3203 is responsible for this step using
3204 the split_branch_probability variable. */
3205 gcc_assert (njumps == 1);
3206 REG_NOTES (insn)
3207 = gen_rtx_EXPR_LIST (REG_BR_PROB,
3208 GEN_INT (probability),
3209 REG_NOTES (insn));
3214 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3215 in SEQ and copy our CALL_INSN_FUNCTION_USAGE to it. */
3216 if (CALL_P (trial))
3218 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3219 if (CALL_P (insn))
3221 rtx *p = &CALL_INSN_FUNCTION_USAGE (insn);
3222 while (*p)
3223 p = &XEXP (*p, 1);
3224 *p = CALL_INSN_FUNCTION_USAGE (trial);
3225 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
3229 /* Copy notes, particularly those related to the CFG. */
3230 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3232 switch (REG_NOTE_KIND (note))
3234 case REG_EH_REGION:
3235 insn = insn_last;
3236 while (insn != NULL_RTX)
3238 if (CALL_P (insn)
3239 || (flag_non_call_exceptions && INSN_P (insn)
3240 && may_trap_p (PATTERN (insn))))
3241 REG_NOTES (insn)
3242 = gen_rtx_EXPR_LIST (REG_EH_REGION,
3243 XEXP (note, 0),
3244 REG_NOTES (insn));
3245 insn = PREV_INSN (insn);
3247 break;
3249 case REG_NORETURN:
3250 case REG_SETJMP:
3251 case REG_ALWAYS_RETURN:
3252 insn = insn_last;
3253 while (insn != NULL_RTX)
3255 if (CALL_P (insn))
3256 REG_NOTES (insn)
3257 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3258 XEXP (note, 0),
3259 REG_NOTES (insn));
3260 insn = PREV_INSN (insn);
3262 break;
3264 case REG_NON_LOCAL_GOTO:
3265 insn = insn_last;
3266 while (insn != NULL_RTX)
3268 if (JUMP_P (insn))
3269 REG_NOTES (insn)
3270 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3271 XEXP (note, 0),
3272 REG_NOTES (insn));
3273 insn = PREV_INSN (insn);
3275 break;
3277 default:
3278 break;
3282 /* If there are LABELS inside the split insns, increment the
3283 usage count so we don't delete the label. */
3284 if (NONJUMP_INSN_P (trial))
3286 insn = insn_last;
3287 while (insn != NULL_RTX)
3289 if (NONJUMP_INSN_P (insn))
3290 mark_label_nuses (PATTERN (insn));
3292 insn = PREV_INSN (insn);
3296 tem = emit_insn_after_setloc (seq, trial, INSN_LOCATOR (trial));
3298 delete_insn (trial);
3299 if (has_barrier)
3300 emit_barrier_after (tem);
3302 /* Recursively call try_split for each new insn created; by the
3303 time control returns here that insn will be fully split, so
3304 set LAST and continue from the insn after the one returned.
3305 We can't use next_active_insn here since AFTER may be a note.
3306 Ignore deleted insns, which can occur if not optimizing. */
3307 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3308 if (! INSN_DELETED_P (tem) && INSN_P (tem))
3309 tem = try_split (PATTERN (tem), tem, 1);
3311 /* Return either the first or the last insn, depending on which was
3312 requested. */
3313 return last
3314 ? (after ? PREV_INSN (after) : last_insn)
3315 : NEXT_INSN (before);
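/* A sketch of how a splitting pass drives try_split on a hypothetical
   INSN; if no split pattern matched, the original insn comes back
   unchanged.

     insn = try_split (PATTERN (insn), insn, 1);  */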
3318 /* Make and return an INSN rtx, initializing all its slots.
3319 Store PATTERN in the pattern slots. */
3322 make_insn_raw (rtx pattern)
3324 rtx insn;
3326 insn = rtx_alloc (INSN);
3328 INSN_UID (insn) = cur_insn_uid++;
3329 PATTERN (insn) = pattern;
3330 INSN_CODE (insn) = -1;
3331 LOG_LINKS (insn) = NULL;
3332 REG_NOTES (insn) = NULL;
3333 INSN_LOCATOR (insn) = 0;
3334 BLOCK_FOR_INSN (insn) = NULL;
3336 #ifdef ENABLE_RTL_CHECKING
3337 if (insn
3338 && INSN_P (insn)
3339 && (returnjump_p (insn)
3340 || (GET_CODE (insn) == SET
3341 && SET_DEST (insn) == pc_rtx)))
3343 warning ("ICE: emit_insn used where emit_jump_insn needed:\n");
3344 debug_rtx (insn);
3346 #endif
3348 return insn;
3351 /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
3353 static rtx
3354 make_jump_insn_raw (rtx pattern)
3356 rtx insn;
3358 insn = rtx_alloc (JUMP_INSN);
3359 INSN_UID (insn) = cur_insn_uid++;
3361 PATTERN (insn) = pattern;
3362 INSN_CODE (insn) = -1;
3363 LOG_LINKS (insn) = NULL;
3364 REG_NOTES (insn) = NULL;
3365 JUMP_LABEL (insn) = NULL;
3366 INSN_LOCATOR (insn) = 0;
3367 BLOCK_FOR_INSN (insn) = NULL;
3369 return insn;
3372 /* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
3374 static rtx
3375 make_call_insn_raw (rtx pattern)
3377 rtx insn;
3379 insn = rtx_alloc (CALL_INSN);
3380 INSN_UID (insn) = cur_insn_uid++;
3382 PATTERN (insn) = pattern;
3383 INSN_CODE (insn) = -1;
3384 LOG_LINKS (insn) = NULL;
3385 REG_NOTES (insn) = NULL;
3386 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
3387 INSN_LOCATOR (insn) = 0;
3388 BLOCK_FOR_INSN (insn) = NULL;
3390 return insn;
3393 /* Add INSN to the end of the doubly-linked list.
3394 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
3396 void
3397 add_insn (rtx insn)
3399 PREV_INSN (insn) = last_insn;
3400 NEXT_INSN (insn) = 0;
3402 if (NULL != last_insn)
3403 NEXT_INSN (last_insn) = insn;
3405 if (NULL == first_insn)
3406 first_insn = insn;
3408 last_insn = insn;
3411 /* Add INSN into the doubly-linked list after insn AFTER. This and
3412 the next should be the only functions called to insert an insn once
3413 delay slots have been filled since only they know how to update a
3414 SEQUENCE. */
3416 void
3417 add_insn_after (rtx insn, rtx after)
3419 rtx next = NEXT_INSN (after);
3420 basic_block bb;
3422 gcc_assert (!optimize || !INSN_DELETED_P (after));
3424 NEXT_INSN (insn) = next;
3425 PREV_INSN (insn) = after;
3427 if (next)
3429 PREV_INSN (next) = insn;
3430 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3431 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
3433 else if (last_insn == after)
3434 last_insn = insn;
3435 else
3437 struct sequence_stack *stack = seq_stack;
3438 /* Scan all pending sequences too. */
3439 for (; stack; stack = stack->next)
3440 if (after == stack->last)
3442 stack->last = insn;
3443 break;
3446 gcc_assert (stack);
3449 if (!BARRIER_P (after)
3450 && !BARRIER_P (insn)
3451 && (bb = BLOCK_FOR_INSN (after)))
3453 set_block_for_insn (insn, bb);
3454 if (INSN_P (insn))
3455 bb->flags |= BB_DIRTY;
3456 /* This should not happen, since the first insn in a BB is always
3457 either a NOTE or a LABEL. */
3458 if (BB_END (bb) == after
3459 /* Avoid clobbering of structure when creating new BB. */
3460 && !BARRIER_P (insn)
3461 && (!NOTE_P (insn)
3462 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
3463 BB_END (bb) = insn;
3466 NEXT_INSN (after) = insn;
3467 if (NONJUMP_INSN_P (after) && GET_CODE (PATTERN (after)) == SEQUENCE)
3469 rtx sequence = PATTERN (after);
3470 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3474 /* Add INSN into the doubly-linked list before insn BEFORE. This and
3475 the previous should be the only functions called to insert an insn once
3476 delay slots have been filled since only they know how to update a
3477 SEQUENCE. */
3479 void
3480 add_insn_before (rtx insn, rtx before)
3482 rtx prev = PREV_INSN (before);
3483 basic_block bb;
3485 gcc_assert (!optimize || !INSN_DELETED_P (before));
3487 PREV_INSN (insn) = prev;
3488 NEXT_INSN (insn) = before;
3490 if (prev)
3492 NEXT_INSN (prev) = insn;
3493 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3495 rtx sequence = PATTERN (prev);
3496 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3499 else if (first_insn == before)
3500 first_insn = insn;
3501 else
3503 struct sequence_stack *stack = seq_stack;
3504 /* Scan all pending sequences too. */
3505 for (; stack; stack = stack->next)
3506 if (before == stack->first)
3508 stack->first = insn;
3509 break;
3512 gcc_assert (stack);
3515 if (!BARRIER_P (before)
3516 && !BARRIER_P (insn)
3517 && (bb = BLOCK_FOR_INSN (before)))
3519 set_block_for_insn (insn, bb);
3520 if (INSN_P (insn))
3521 bb->flags |= BB_DIRTY;
3522 /* This should not happen, since the first insn in a BB is always either a NOTE
3523 or a LABEL. */
3524 gcc_assert (BB_HEAD (bb) != insn
3525 /* Avoid clobbering of structure when creating new BB. */
3526 || BARRIER_P (insn)
3527 || (NOTE_P (insn)
3528 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_BASIC_BLOCK));
3531 PREV_INSN (before) = insn;
3532 if (NONJUMP_INSN_P (before) && GET_CODE (PATTERN (before)) == SEQUENCE)
3533 PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
3536 /* Remove an insn from its doubly-linked list. This function knows how
3537 to handle sequences. */
3538 void
3539 remove_insn (rtx insn)
3541 rtx next = NEXT_INSN (insn);
3542 rtx prev = PREV_INSN (insn);
3543 basic_block bb;
3545 if (prev)
3547 NEXT_INSN (prev) = next;
3548 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3550 rtx sequence = PATTERN (prev);
3551 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
3554 else if (first_insn == insn)
3555 first_insn = next;
3556 else
3558 struct sequence_stack *stack = seq_stack;
3559 /* Scan all pending sequences too. */
3560 for (; stack; stack = stack->next)
3561 if (insn == stack->first)
3563 stack->first = next;
3564 break;
3567 gcc_assert (stack);
3570 if (next)
3572 PREV_INSN (next) = prev;
3573 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3574 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
3576 else if (last_insn == insn)
3577 last_insn = prev;
3578 else
3580 struct sequence_stack *stack = seq_stack;
3581 /* Scan all pending sequences too. */
3582 for (; stack; stack = stack->next)
3583 if (insn == stack->last)
3585 stack->last = prev;
3586 break;
3589 gcc_assert (stack);
3591 if (!BARRIER_P (insn)
3592 && (bb = BLOCK_FOR_INSN (insn)))
3594 if (INSN_P (insn))
3595 bb->flags |= BB_DIRTY;
3596 if (BB_HEAD (bb) == insn)
3598 /* Never ever delete the basic block note without deleting the whole
3599 basic block. */
3600 gcc_assert (!NOTE_P (insn));
3601 BB_HEAD (bb) = next;
3603 if (BB_END (bb) == insn)
3604 BB_END (bb) = prev;
3608 /* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
3610 void
3611 add_function_usage_to (rtx call_insn, rtx call_fusage)
3613 gcc_assert (call_insn && CALL_P (call_insn));
3615 /* Put the register usage information on the CALL. If there is already
3616 some usage information, put ours at the end. */
3617 if (CALL_INSN_FUNCTION_USAGE (call_insn))
3619 rtx link;
3621 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
3622 link = XEXP (link, 1))
3625 XEXP (link, 1) = call_fusage;
3627 else
3628 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
3631 /* Delete all insns made since FROM.
3632 FROM becomes the new last instruction. */
3634 void
3635 delete_insns_since (rtx from)
3637 if (from == 0)
3638 first_insn = 0;
3639 else
3640 NEXT_INSN (from) = 0;
3641 last_insn = from;
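/* delete_insns_since supports the usual "emit tentatively, back out on
   failure" idiom.  LAST, the tentative insns and the FAILED flag here are
   all hypothetical:

     rtx last = get_last_insn ();
     ... emit insns that may turn out not to be usable ...
     if (failed)
       delete_insns_since (last);  */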
3644 /* This function is deprecated; please use sequences instead.
3646 Move a consecutive bunch of insns to a different place in the chain.
3647 The insns to be moved are those between FROM and TO.
3648 They are moved to a new position after the insn AFTER.
3649 AFTER must not be FROM or TO or any insn in between.
3651 This function does not know about SEQUENCEs and hence should not be
3652 called after delay-slot filling has been done. */
3654 void
3655 reorder_insns_nobb (rtx from, rtx to, rtx after)
3657 /* Splice this bunch out of where it is now. */
3658 if (PREV_INSN (from))
3659 NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
3660 if (NEXT_INSN (to))
3661 PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
3662 if (last_insn == to)
3663 last_insn = PREV_INSN (from);
3664 if (first_insn == from)
3665 first_insn = NEXT_INSN (to);
3667 /* Make the new neighbors point to it and it to them. */
3668 if (NEXT_INSN (after))
3669 PREV_INSN (NEXT_INSN (after)) = to;
3671 NEXT_INSN (to) = NEXT_INSN (after);
3672 PREV_INSN (from) = after;
3673 NEXT_INSN (after) = from;
3674 if (after == last_insn)
3675 last_insn = to;
3678 /* Same as function above, but take care to update BB boundaries. */
3679 void
3680 reorder_insns (rtx from, rtx to, rtx after)
3682 rtx prev = PREV_INSN (from);
3683 basic_block bb, bb2;
3685 reorder_insns_nobb (from, to, after);
3687 if (!BARRIER_P (after)
3688 && (bb = BLOCK_FOR_INSN (after)))
3690 rtx x;
3691 bb->flags |= BB_DIRTY;
3693 if (!BARRIER_P (from)
3694 && (bb2 = BLOCK_FOR_INSN (from)))
3696 if (BB_END (bb2) == to)
3697 BB_END (bb2) = prev;
3698 bb2->flags |= BB_DIRTY;
3701 if (BB_END (bb) == after)
3702 BB_END (bb) = to;
3704 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
3705 if (!BARRIER_P (x))
3706 set_block_for_insn (x, bb);
3710 /* Return the line note insn preceding INSN. */
3712 static rtx
3713 find_line_note (rtx insn)
3715 if (no_line_numbers)
3716 return 0;
3718 for (; insn; insn = PREV_INSN (insn))
3719 if (NOTE_P (insn)
3720 && NOTE_LINE_NUMBER (insn) >= 0)
3721 break;
3723 return insn;
3726 /* Remove unnecessary notes from the instruction stream. */
3728 void
3729 remove_unnecessary_notes (void)
3731 rtx block_stack = NULL_RTX;
3732 rtx eh_stack = NULL_RTX;
3733 rtx insn;
3734 rtx next;
3735 rtx tmp;
3737 /* We must not remove the first instruction in the function because
3738 the compiler depends on the first instruction being a note. */
3739 for (insn = NEXT_INSN (get_insns ()); insn; insn = next)
3741 /* Remember what's next. */
3742 next = NEXT_INSN (insn);
3744 /* We're only interested in notes. */
3745 if (!NOTE_P (insn))
3746 continue;
3748 switch (NOTE_LINE_NUMBER (insn))
3750 case NOTE_INSN_DELETED:
3751 remove_insn (insn);
3752 break;
3754 case NOTE_INSN_EH_REGION_BEG:
3755 eh_stack = alloc_INSN_LIST (insn, eh_stack);
3756 break;
3758 case NOTE_INSN_EH_REGION_END:
3759 /* Too many end notes. */
3760 gcc_assert (eh_stack);
3761 /* Mismatched nesting. */
3762 gcc_assert (NOTE_EH_HANDLER (XEXP (eh_stack, 0))
3763 == NOTE_EH_HANDLER (insn));
3764 tmp = eh_stack;
3765 eh_stack = XEXP (eh_stack, 1);
3766 free_INSN_LIST_node (tmp);
3767 break;
3769 case NOTE_INSN_BLOCK_BEG:
3770 /* By now, all notes indicating lexical blocks should have
3771 NOTE_BLOCK filled in. */
3772 gcc_assert (NOTE_BLOCK (insn));
3773 block_stack = alloc_INSN_LIST (insn, block_stack);
3774 break;
3776 case NOTE_INSN_BLOCK_END:
3777 /* Too many end notes. */
3778 gcc_assert (block_stack);
3779 /* Mismatched nesting. */
3780 gcc_assert (NOTE_BLOCK (XEXP (block_stack, 0)) == NOTE_BLOCK (insn));
3781 tmp = block_stack;
3782 block_stack = XEXP (block_stack, 1);
3783 free_INSN_LIST_node (tmp);
3785 /* Scan back to see if there are any non-note instructions
3786 between INSN and the beginning of this block. If not,
3787 then there is no PC range in the generated code that will
3788 actually be in this block, so there's no point in
3789 remembering the existence of the block. */
3790 for (tmp = PREV_INSN (insn); tmp; tmp = PREV_INSN (tmp))
3792 /* This block contains a real instruction. Note that we
3793 don't include labels; if the only thing in the block
3794 is a label, then there are still no PC values that
3795 lie within the block. */
3796 if (INSN_P (tmp))
3797 break;
3799 /* We're only interested in NOTEs. */
3800 if (!NOTE_P (tmp))
3801 continue;
3803 if (NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BLOCK_BEG)
3805 /* We just verified that this BLOCK matches us with
3806 the block_stack check above. Never delete the
3807 BLOCK for the outermost scope of the function; we
3808 can refer to names from that scope even if the
3809 block notes are messed up. */
3810 if (! is_body_block (NOTE_BLOCK (insn))
3811 && (*debug_hooks->ignore_block) (NOTE_BLOCK (insn)))
3813 remove_insn (tmp);
3814 remove_insn (insn);
3816 break;
3818 else if (NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BLOCK_END)
3819 /* There's a nested block. We need to leave the
3820 current block in place since otherwise the debugger
3821 wouldn't be able to show symbols from our block in
3822 the nested block. */
3823 break;
3828 /* Too many begin notes. */
3829 gcc_assert (!block_stack && !eh_stack);
3833 /* Emit insn(s) of given code and pattern
3834 at a specified place within the doubly-linked list.
3836 All of the emit_foo global entry points accept an object
3837 X which is either an insn list or a PATTERN of a single
3838 instruction.
3840 There are thus a few canonical ways to generate code and
3841 emit it at a specific place in the instruction stream. For
3842 example, consider the instruction named SPOT and the fact that
3843 we would like to emit some instructions before SPOT. We might
3844 do it like this:
3846 start_sequence ();
3847 ... emit the new instructions ...
3848 insns_head = get_insns ();
3849 end_sequence ();
3851 emit_insn_before (insns_head, SPOT);
3853 It used to be common to generate SEQUENCE rtl instead, but that
3854 is a relic of the past which no longer occurs. The reason is that
3855 SEQUENCE rtl results in badly fragmented RTL memory, since the SEQUENCE
3856 generated would almost certainly die right after it was created. */
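/* A concrete version of the sketch above, with SPOT and TEMP hypothetical:

     rtx insns_head;
     start_sequence ();
     emit_move_insn (temp, const0_rtx);
     insns_head = get_insns ();
     end_sequence ();

     emit_insn_before (insns_head, spot);  */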
3858 /* Make X be output before the instruction BEFORE. */
3861 emit_insn_before_noloc (rtx x, rtx before)
3863 rtx last = before;
3864 rtx insn;
3866 gcc_assert (before);
3868 if (x == NULL_RTX)
3869 return last;
3871 switch (GET_CODE (x))
3873 case INSN:
3874 case JUMP_INSN:
3875 case CALL_INSN:
3876 case CODE_LABEL:
3877 case BARRIER:
3878 case NOTE:
3879 insn = x;
3880 while (insn)
3882 rtx next = NEXT_INSN (insn);
3883 add_insn_before (insn, before);
3884 last = insn;
3885 insn = next;
3887 break;
3889 #ifdef ENABLE_RTL_CHECKING
3890 case SEQUENCE:
3891 gcc_unreachable ();
3892 break;
3893 #endif
3895 default:
3896 last = make_insn_raw (x);
3897 add_insn_before (last, before);
3898 break;
3901 return last;
3904 /* Make an instruction with body X and code JUMP_INSN
3905 and output it before the instruction BEFORE. */
3908 emit_jump_insn_before_noloc (rtx x, rtx before)
3910 rtx insn, last = NULL_RTX;
3912 gcc_assert (before);
3914 switch (GET_CODE (x))
3916 case INSN:
3917 case JUMP_INSN:
3918 case CALL_INSN:
3919 case CODE_LABEL:
3920 case BARRIER:
3921 case NOTE:
3922 insn = x;
3923 while (insn)
3925 rtx next = NEXT_INSN (insn);
3926 add_insn_before (insn, before);
3927 last = insn;
3928 insn = next;
3930 break;
3932 #ifdef ENABLE_RTL_CHECKING
3933 case SEQUENCE:
3934 gcc_unreachable ();
3935 break;
3936 #endif
3938 default:
3939 last = make_jump_insn_raw (x);
3940 add_insn_before (last, before);
3941 break;
3944 return last;
3947 /* Make an instruction with body X and code CALL_INSN
3948 and output it before the instruction BEFORE. */
3951 emit_call_insn_before_noloc (rtx x, rtx before)
3953 rtx last = NULL_RTX, insn;
3955 gcc_assert (before);
3957 switch (GET_CODE (x))
3959 case INSN:
3960 case JUMP_INSN:
3961 case CALL_INSN:
3962 case CODE_LABEL:
3963 case BARRIER:
3964 case NOTE:
3965 insn = x;
3966 while (insn)
3968 rtx next = NEXT_INSN (insn);
3969 add_insn_before (insn, before);
3970 last = insn;
3971 insn = next;
3973 break;
3975 #ifdef ENABLE_RTL_CHECKING
3976 case SEQUENCE:
3977 gcc_unreachable ();
3978 break;
3979 #endif
3981 default:
3982 last = make_call_insn_raw (x);
3983 add_insn_before (last, before);
3984 break;
3987 return last;
3990 /* Make an insn of code BARRIER
3991 and output it before the insn BEFORE. */
3994 emit_barrier_before (rtx before)
3996 rtx insn = rtx_alloc (BARRIER);
3998 INSN_UID (insn) = cur_insn_uid++;
4000 add_insn_before (insn, before);
4001 return insn;
4004 /* Emit the label LABEL before the insn BEFORE. */
4007 emit_label_before (rtx label, rtx before)
4009 /* This can be called twice for the same label as a result of the
4010 confusion that follows a syntax error! So make it harmless. */
4011 if (INSN_UID (label) == 0)
4013 INSN_UID (label) = cur_insn_uid++;
4014 add_insn_before (label, before);
4017 return label;
4020 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4023 emit_note_before (int subtype, rtx before)
4025 rtx note = rtx_alloc (NOTE);
4026 INSN_UID (note) = cur_insn_uid++;
4027 #ifndef USE_MAPPED_LOCATION
4028 NOTE_SOURCE_FILE (note) = 0;
4029 #endif
4030 NOTE_LINE_NUMBER (note) = subtype;
4031 BLOCK_FOR_INSN (note) = NULL;
4033 add_insn_before (note, before);
4034 return note;
4037 /* Helper for emit_insn_after, handles lists of instructions
4038 efficiently. */
4040 static rtx emit_insn_after_1 (rtx, rtx);
4042 static rtx
4043 emit_insn_after_1 (rtx first, rtx after)
4045 rtx last;
4046 rtx after_after;
4047 basic_block bb;
4049 if (!BARRIER_P (after)
4050 && (bb = BLOCK_FOR_INSN (after)))
4052 bb->flags |= BB_DIRTY;
4053 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4054 if (!BARRIER_P (last))
4055 set_block_for_insn (last, bb);
4056 if (!BARRIER_P (last))
4057 set_block_for_insn (last, bb);
4058 if (BB_END (bb) == after)
4059 BB_END (bb) = last;
4061 else
4062 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4063 continue;
4065 after_after = NEXT_INSN (after);
4067 NEXT_INSN (after) = first;
4068 PREV_INSN (first) = after;
4069 NEXT_INSN (last) = after_after;
4070 if (after_after)
4071 PREV_INSN (after_after) = last;
4073 if (after == last_insn)
4074 last_insn = last;
4075 return last;
4078 /* Make X be output after the insn AFTER. */
4081 emit_insn_after_noloc (rtx x, rtx after)
4083 rtx last = after;
4085 gcc_assert (after);
4087 if (x == NULL_RTX)
4088 return last;
4090 switch (GET_CODE (x))
4092 case INSN:
4093 case JUMP_INSN:
4094 case CALL_INSN:
4095 case CODE_LABEL:
4096 case BARRIER:
4097 case NOTE:
4098 last = emit_insn_after_1 (x, after);
4099 break;
4101 #ifdef ENABLE_RTL_CHECKING
4102 case SEQUENCE:
4103 gcc_unreachable ();
4104 break;
4105 #endif
4107 default:
4108 last = make_insn_raw (x);
4109 add_insn_after (last, after);
4110 break;
4113 return last;
4116 /* Similar to emit_insn_after, except that line notes are to be inserted so
4117 as to act as if this insn were at FROM. */
4119 void
4120 emit_insn_after_with_line_notes (rtx x, rtx after, rtx from)
4122 rtx from_line = find_line_note (from);
4123 rtx after_line = find_line_note (after);
4124 rtx insn = emit_insn_after (x, after);
4126 if (from_line)
4127 emit_note_copy_after (from_line, after);
4129 if (after_line)
4130 emit_note_copy_after (after_line, insn);
4133 /* Make an insn of code JUMP_INSN with body X
4134 and output it after the insn AFTER. */
4137 emit_jump_insn_after_noloc (rtx x, rtx after)
4139 rtx last;
4141 gcc_assert (after);
4143 switch (GET_CODE (x))
4145 case INSN:
4146 case JUMP_INSN:
4147 case CALL_INSN:
4148 case CODE_LABEL:
4149 case BARRIER:
4150 case NOTE:
4151 last = emit_insn_after_1 (x, after);
4152 break;
4154 #ifdef ENABLE_RTL_CHECKING
4155 case SEQUENCE:
4156 gcc_unreachable ();
4157 break;
4158 #endif
4160 default:
4161 last = make_jump_insn_raw (x);
4162 add_insn_after (last, after);
4163 break;
4166 return last;
4169 /* Make an instruction with body X and code CALL_INSN
4170 and output it after the instruction AFTER. */
4173 emit_call_insn_after_noloc (rtx x, rtx after)
4175 rtx last;
4177 gcc_assert (after);
4179 switch (GET_CODE (x))
4181 case INSN:
4182 case JUMP_INSN:
4183 case CALL_INSN:
4184 case CODE_LABEL:
4185 case BARRIER:
4186 case NOTE:
4187 last = emit_insn_after_1 (x, after);
4188 break;
4190 #ifdef ENABLE_RTL_CHECKING
4191 case SEQUENCE:
4192 gcc_unreachable ();
4193 break;
4194 #endif
4196 default:
4197 last = make_call_insn_raw (x);
4198 add_insn_after (last, after);
4199 break;
4202 return last;
4205 /* Make an insn of code BARRIER
4206 and output it after the insn AFTER. */
4209 emit_barrier_after (rtx after)
4211 rtx insn = rtx_alloc (BARRIER);
4213 INSN_UID (insn) = cur_insn_uid++;
4215 add_insn_after (insn, after);
4216 return insn;
4219 /* Emit the label LABEL after the insn AFTER. */
4222 emit_label_after (rtx label, rtx after)
4224 /* This can be called twice for the same label
4225 as a result of the confusion that follows a syntax error!
4226 So make it harmless. */
4227 if (INSN_UID (label) == 0)
4229 INSN_UID (label) = cur_insn_uid++;
4230 add_insn_after (label, after);
4233 return label;
4236 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
4239 emit_note_after (int subtype, rtx after)
4241 rtx note = rtx_alloc (NOTE);
4242 INSN_UID (note) = cur_insn_uid++;
4243 #ifndef USE_MAPPED_LOCATION
4244 NOTE_SOURCE_FILE (note) = 0;
4245 #endif
4246 NOTE_LINE_NUMBER (note) = subtype;
4247 BLOCK_FOR_INSN (note) = NULL;
4248 add_insn_after (note, after);
4249 return note;
4252 /* Emit a copy of note ORIG after the insn AFTER. */
4255 emit_note_copy_after (rtx orig, rtx after)
4257 rtx note;
4259 if (NOTE_LINE_NUMBER (orig) >= 0 && no_line_numbers)
4261 cur_insn_uid++;
4262 return 0;
4265 note = rtx_alloc (NOTE);
4266 INSN_UID (note) = cur_insn_uid++;
4267 NOTE_LINE_NUMBER (note) = NOTE_LINE_NUMBER (orig);
4268 NOTE_DATA (note) = NOTE_DATA (orig);
4269 BLOCK_FOR_INSN (note) = NULL;
4270 add_insn_after (note, after);
4271 return note;
4274 /* Like emit_insn_after_noloc, but set INSN_LOCATOR according to SCOPE. */
4276 emit_insn_after_setloc (rtx pattern, rtx after, int loc)
4278 rtx last = emit_insn_after_noloc (pattern, after);
4280 if (pattern == NULL_RTX || !loc)
4281 return last;
4283 after = NEXT_INSN (after);
4284 while (1)
4286 if (active_insn_p (after) && !INSN_LOCATOR (after))
4287 INSN_LOCATOR (after) = loc;
4288 if (after == last)
4289 break;
4290 after = NEXT_INSN (after);
4292 return last;
4295 /* Like emit_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4297 emit_insn_after (rtx pattern, rtx after)
4299 if (INSN_P (after))
4300 return emit_insn_after_setloc (pattern, after, INSN_LOCATOR (after));
4301 else
4302 return emit_insn_after_noloc (pattern, after);
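
/* Illustrative sketch (not part of the original file): the emit-after
   entry points differ only in how INSN_LOCATOR is seeded.  The helper
   below is hypothetical and exists only to show the three choices side
   by side; its name and its use of word_mode are assumptions made for
   the example.  */

static rtx
example_emit_clear_after (rtx after, int loc)
{
  rtx reg = gen_reg_rtx (word_mode);
  rtx pat = gen_rtx_SET (VOIDmode, reg, const0_rtx);

  if (loc)
    /* Force a specific locator onto the new insn.  */
    return emit_insn_after_setloc (pat, after, loc);
  else if (INSN_P (after))
    /* Inherit the locator of AFTER; this is what emit_insn_after
       itself does.  */
    return emit_insn_after (pat, after);
  else
    /* Leave the locator unset.  */
    return emit_insn_after_noloc (pat, after);
}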
4305 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to SCOPE. */
4307 emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
4309 rtx last = emit_jump_insn_after_noloc (pattern, after);
4311 if (pattern == NULL_RTX || !loc)
4312 return last;
4314 after = NEXT_INSN (after);
4315 while (1)
4317 if (active_insn_p (after) && !INSN_LOCATOR (after))
4318 INSN_LOCATOR (after) = loc;
4319 if (after == last)
4320 break;
4321 after = NEXT_INSN (after);
4323 return last;
4326 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4328 emit_jump_insn_after (rtx pattern, rtx after)
4330 if (INSN_P (after))
4331 return emit_jump_insn_after_setloc (pattern, after, INSN_LOCATOR (after));
4332 else
4333 return emit_jump_insn_after_noloc (pattern, after);
4336 /* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to SCOPE. */
4338 emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
4340 rtx last = emit_call_insn_after_noloc (pattern, after);
4342 if (pattern == NULL_RTX || !loc)
4343 return last;
4345 after = NEXT_INSN (after);
4346 while (1)
4348 if (active_insn_p (after) && !INSN_LOCATOR (after))
4349 INSN_LOCATOR (after) = loc;
4350 if (after == last)
4351 break;
4352 after = NEXT_INSN (after);
4354 return last;
4357 /* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4359 emit_call_insn_after (rtx pattern, rtx after)
4361 if (INSN_P (after))
4362 return emit_call_insn_after_setloc (pattern, after, INSN_LOCATOR (after));
4363 else
4364 return emit_call_insn_after_noloc (pattern, after);
4367 /* Like emit_insn_before_noloc, but set INSN_LOCATOR according to SCOPE. */
4369 emit_insn_before_setloc (rtx pattern, rtx before, int loc)
4371 rtx first = PREV_INSN (before);
4372 rtx last = emit_insn_before_noloc (pattern, before);
4374 if (pattern == NULL_RTX || !loc)
4375 return last;
4377 first = NEXT_INSN (first);
4378 while (1)
4380 if (active_insn_p (first) && !INSN_LOCATOR (first))
4381 INSN_LOCATOR (first) = loc;
4382 if (first == last)
4383 break;
4384 first = NEXT_INSN (first);
4386 return last;
4389 /* Like emit_insn_before_noloc, but set INSN_LOCATOR according to BEFORE. */
4391 emit_insn_before (rtx pattern, rtx before)
4393 if (INSN_P (before))
4394 return emit_insn_before_setloc (pattern, before, INSN_LOCATOR (before));
4395 else
4396 return emit_insn_before_noloc (pattern, before);
4399 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATOR according to SCOPE. */
4401 emit_jump_insn_before_setloc (rtx pattern, rtx before, int loc)
4403 rtx first = PREV_INSN (before);
4404 rtx last = emit_jump_insn_before_noloc (pattern, before);
4406 if (pattern == NULL_RTX || !loc)
4407 return last;
4409 first = NEXT_INSN (first);
4410 while (1)
4412 if (active_insn_p (first) && !INSN_LOCATOR (first))
4413 INSN_LOCATOR (first) = loc;
4414 if (first == last)
4415 break;
4416 first = NEXT_INSN (first);
4418 return last;
4421 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATOR according to BEFORE. */
4423 emit_jump_insn_before (rtx pattern, rtx before)
4425 if (INSN_P (before))
4426 return emit_jump_insn_before_setloc (pattern, before, INSN_LOCATOR (before));
4427 else
4428 return emit_jump_insn_before_noloc (pattern, before);
4431 /* Like emit_call_insn_before_noloc, but set INSN_LOCATOR according to SCOPE. */
4433 emit_call_insn_before_setloc (rtx pattern, rtx before, int loc)
4435 rtx first = PREV_INSN (before);
4436 rtx last = emit_call_insn_before_noloc (pattern, before);
4438 if (pattern == NULL_RTX || !loc)
4439 return last;
4441 first = NEXT_INSN (first);
4442 while (1)
4444 if (active_insn_p (first) && !INSN_LOCATOR (first))
4445 INSN_LOCATOR (first) = loc;
4446 if (first == last)
4447 break;
4448 first = NEXT_INSN (first);
4450 return last;
4453 /* Like emit_call_insn_before_noloc,
4454 but set INSN_LOCATOR according to BEFORE. */
4456 emit_call_insn_before (rtx pattern, rtx before)
4458 if (INSN_P (before))
4459 return emit_call_insn_before_setloc (pattern, before, INSN_LOCATOR (before));
4460 else
4461 return emit_call_insn_before_noloc (pattern, before);
4464 /* Take X and emit it at the end of the doubly-linked
4465 INSN list.
4467 Returns the last insn emitted. */
4470 emit_insn (rtx x)
4472 rtx last = last_insn;
4473 rtx insn;
4475 if (x == NULL_RTX)
4476 return last;
4478 switch (GET_CODE (x))
4480 case INSN:
4481 case JUMP_INSN:
4482 case CALL_INSN:
4483 case CODE_LABEL:
4484 case BARRIER:
4485 case NOTE:
4486 insn = x;
4487 while (insn)
4489 rtx next = NEXT_INSN (insn);
4490 add_insn (insn);
4491 last = insn;
4492 insn = next;
4494 break;
4496 #ifdef ENABLE_RTL_CHECKING
4497 case SEQUENCE:
4498 gcc_unreachable ();
4499 break;
4500 #endif
4502 default:
4503 last = make_insn_raw (x);
4504 add_insn (last);
4505 break;
4508 return last;
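
/* Illustrative sketch (not part of the original file): emit_insn accepts
   either a bare pattern, which is wrapped in a fresh INSN, or an already
   constructed insn chain, which is spliced onto the end of the current
   sequence unchanged.  The helper name is an assumption made for the
   example.  */

static rtx
example_emit_increment (rtx reg)
{
  /* A bare SET pattern: emit_insn wraps it via make_insn_raw and then
     calls add_insn.  */
  return emit_insn (gen_rtx_SET (VOIDmode, reg,
                                 gen_rtx_PLUS (GET_MODE (reg),
                                               reg, const1_rtx)));
}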
4511 /* Make an insn of code JUMP_INSN with pattern X
4512 and add it to the end of the doubly-linked list. */
4515 emit_jump_insn (rtx x)
4517 rtx last = NULL_RTX, insn;
4519 switch (GET_CODE (x))
4521 case INSN:
4522 case JUMP_INSN:
4523 case CALL_INSN:
4524 case CODE_LABEL:
4525 case BARRIER:
4526 case NOTE:
4527 insn = x;
4528 while (insn)
4530 rtx next = NEXT_INSN (insn);
4531 add_insn (insn);
4532 last = insn;
4533 insn = next;
4535 break;
4537 #ifdef ENABLE_RTL_CHECKING
4538 case SEQUENCE:
4539 gcc_unreachable ();
4540 break;
4541 #endif
4543 default:
4544 last = make_jump_insn_raw (x);
4545 add_insn (last);
4546 break;
4549 return last;
4552 /* Make an insn of code CALL_INSN with pattern X
4553 and add it to the end of the doubly-linked list. */
4556 emit_call_insn (rtx x)
4558 rtx insn;
4560 switch (GET_CODE (x))
4562 case INSN:
4563 case JUMP_INSN:
4564 case CALL_INSN:
4565 case CODE_LABEL:
4566 case BARRIER:
4567 case NOTE:
4568 insn = emit_insn (x);
4569 break;
4571 #ifdef ENABLE_RTL_CHECKING
4572 case SEQUENCE:
4573 gcc_unreachable ();
4574 break;
4575 #endif
4577 default:
4578 insn = make_call_insn_raw (x);
4579 add_insn (insn);
4580 break;
4583 return insn;
4586 /* Add the label LABEL to the end of the doubly-linked list. */
4589 emit_label (rtx label)
4591 /* This can be called twice for the same label
4592 as a result of the confusion that follows a syntax error!
4593 So make it harmless. */
4594 if (INSN_UID (label) == 0)
4596 INSN_UID (label) = cur_insn_uid++;
4597 add_insn (label);
4599 return label;
4602 /* Make an insn of code BARRIER
4603 and add it to the end of the doubly-linked list. */
4606 emit_barrier (void)
4608 rtx barrier = rtx_alloc (BARRIER);
4609 INSN_UID (barrier) = cur_insn_uid++;
4610 add_insn (barrier);
4611 return barrier;
4614 /* Make a line-number NOTE insn for LOCATION and add it to the end
4615 of the doubly-linked list, but only if line numbers are desired for
4616 debugging info and it doesn't match the previous one. */
4619 emit_line_note (location_t location)
4621 rtx note;
4623 #ifdef USE_MAPPED_LOCATION
4624 if (location == last_location)
4625 return NULL_RTX;
4626 #else
4627 if (location.file && last_location.file
4628 && !strcmp (location.file, last_location.file)
4629 && location.line == last_location.line)
4630 return NULL_RTX;
4631 #endif
4632 last_location = location;
4634 if (no_line_numbers)
4636 cur_insn_uid++;
4637 return NULL_RTX;
4640 #ifdef USE_MAPPED_LOCATION
4641 note = emit_note ((int) location);
4642 #else
4643 note = emit_note (location.line);
4644 NOTE_SOURCE_FILE (note) = location.file;
4645 #endif
4647 return note;
4650 /* Emit a copy of note ORIG. */
4653 emit_note_copy (rtx orig)
4655 rtx note;
4657 if (NOTE_LINE_NUMBER (orig) >= 0 && no_line_numbers)
4659 cur_insn_uid++;
4660 return NULL_RTX;
4663 note = rtx_alloc (NOTE);
4665 INSN_UID (note) = cur_insn_uid++;
4666 NOTE_DATA (note) = NOTE_DATA (orig);
4667 NOTE_LINE_NUMBER (note) = NOTE_LINE_NUMBER (orig);
4668 BLOCK_FOR_INSN (note) = NULL;
4669 add_insn (note);
4671 return note;
4674 /* Make an insn of code NOTE of type NOTE_NO
4675 and add it to the end of the doubly-linked list. */
4678 emit_note (int note_no)
4680 rtx note;
4682 note = rtx_alloc (NOTE);
4683 INSN_UID (note) = cur_insn_uid++;
4684 NOTE_LINE_NUMBER (note) = note_no;
4685 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
4686 BLOCK_FOR_INSN (note) = NULL;
4687 add_insn (note);
4688 return note;
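
/* Illustrative sketch (not part of the original file): special notes are
   emitted by passing one of the NOTE_INSN_* subtypes, which ends up in
   NOTE_LINE_NUMBER as the code above shows.  The helper name is an
   assumption made for the example.  */

static rtx
example_emit_deleted_note (void)
{
  return emit_note (NOTE_INSN_DELETED);
}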
4691 /* Cause the next statement to emit a line note even if the line number
4692 has not changed. */
4694 void
4695 force_next_line_note (void)
4697 #ifdef USE_MAPPED_LOCATION
4698 last_location = -1;
4699 #else
4700 last_location.line = -1;
4701 #endif
4704 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
4705 note of this kind already exists, its datum is replaced with DATUM. */
4708 set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
4710 rtx note = find_reg_note (insn, kind, NULL_RTX);
4712 switch (kind)
4714 case REG_EQUAL:
4715 case REG_EQUIV:
4716 /* Don't add REG_EQUAL/REG_EQUIV notes if the insn
4717 has multiple sets (some callers assume single_set
4718 means the insn only has one set, when in fact it
4719 means the insn only has one *useful* set). */
4720 if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
4722 gcc_assert (!note);
4723 return NULL_RTX;
4726 /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
4727 It serves no useful purpose and breaks eliminate_regs. */
4728 if (GET_CODE (datum) == ASM_OPERANDS)
4729 return NULL_RTX;
4730 break;
4732 default:
4733 break;
4736 if (note)
4738 XEXP (note, 0) = datum;
4739 return note;
4742 REG_NOTES (insn) = gen_rtx_EXPR_LIST (kind, datum, REG_NOTES (insn));
4743 return REG_NOTES (insn);
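
/* Illustrative sketch (not part of the original file): a typical use of
   set_unique_reg_note is to record the value computed by a multi-insn
   expansion as a REG_EQUAL note on its final insn, so later passes can
   substitute the known value.  The helper name is an assumption made
   for the example.  */

static rtx
example_note_product (rtx insn, rtx op0, rtx op1)
{
  /* set_unique_reg_note itself refuses REG_EQUAL notes on multi-set
     PARALLELs; checking single_set first keeps the intent explicit.  */
  if (single_set (insn))
    set_unique_reg_note (insn, REG_EQUAL,
                         gen_rtx_MULT (GET_MODE (op0), op0, op1));
  return insn;
}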
4746 /* Return an indication of which type of insn should have X as a body.
4747 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
4749 static enum rtx_code
4750 classify_insn (rtx x)
4752 if (LABEL_P (x))
4753 return CODE_LABEL;
4754 if (GET_CODE (x) == CALL)
4755 return CALL_INSN;
4756 if (GET_CODE (x) == RETURN)
4757 return JUMP_INSN;
4758 if (GET_CODE (x) == SET)
4760 if (SET_DEST (x) == pc_rtx)
4761 return JUMP_INSN;
4762 else if (GET_CODE (SET_SRC (x)) == CALL)
4763 return CALL_INSN;
4764 else
4765 return INSN;
4767 if (GET_CODE (x) == PARALLEL)
4769 int j;
4770 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
4771 if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
4772 return CALL_INSN;
4773 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
4774 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
4775 return JUMP_INSN;
4776 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
4777 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
4778 return CALL_INSN;
4780 return INSN;
4783 /* Emit the rtl pattern X as an appropriate kind of insn.
4784 If X is a label, it is simply added into the insn chain. */
4787 emit (rtx x)
4789 enum rtx_code code = classify_insn (x);
4791 switch (code)
4793 case CODE_LABEL:
4794 return emit_label (x);
4795 case INSN:
4796 return emit_insn (x);
4797 case JUMP_INSN:
4799 rtx insn = emit_jump_insn (x);
4800 if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
4801 return emit_barrier ();
4802 return insn;
4804 case CALL_INSN:
4805 return emit_call_insn (x);
4806 default:
4807 gcc_unreachable ();
4811 /* Space for free sequence stack entries. */
4812 static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
4814 /* Begin emitting insns to a sequence. If this sequence will contain
4815 something that might cause the compiler to pop arguments to function
4816 calls (because those pops have previously been deferred; see
4817 INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
4818 before calling this function. That will ensure that the deferred
4819 pops are not accidentally emitted in the middle of this sequence. */
4821 void
4822 start_sequence (void)
4824 struct sequence_stack *tem;
4826 if (free_sequence_stack != NULL)
4828 tem = free_sequence_stack;
4829 free_sequence_stack = tem->next;
4831 else
4832 tem = ggc_alloc (sizeof (struct sequence_stack));
4834 tem->next = seq_stack;
4835 tem->first = first_insn;
4836 tem->last = last_insn;
4838 seq_stack = tem;
4840 first_insn = 0;
4841 last_insn = 0;
4844 /* Set up the insn chain starting with FIRST as the current sequence,
4845 saving the previously current one. See the documentation for
4846 start_sequence for more information about how to use this function. */
4848 void
4849 push_to_sequence (rtx first)
4851 rtx last;
4853 start_sequence ();
4855 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last));
4857 first_insn = first;
4858 last_insn = last;
4861 /* Set up the outer-level insn chain
4862 as the current sequence, saving the previously current one. */
4864 void
4865 push_topmost_sequence (void)
4867 struct sequence_stack *stack, *top = NULL;
4869 start_sequence ();
4871 for (stack = seq_stack; stack; stack = stack->next)
4872 top = stack;
4874 first_insn = top->first;
4875 last_insn = top->last;
4878 /* After emitting to the outer-level insn chain, update the outer-level
4879 insn chain, and restore the previous saved state. */
4881 void
4882 pop_topmost_sequence (void)
4884 struct sequence_stack *stack, *top = NULL;
4886 for (stack = seq_stack; stack; stack = stack->next)
4887 top = stack;
4889 top->first = first_insn;
4890 top->last = last_insn;
4892 end_sequence ();
4895 /* After emitting to a sequence, restore previous saved state.
4897 To get the contents of the sequence just made, you must call
4898 `get_insns' *before* calling here.
4900 If the compiler might have deferred popping arguments while
4901 generating this sequence, and this sequence will not be immediately
4902 inserted into the instruction stream, use do_pending_stack_adjust
4903 before calling get_insns. That will ensure that the deferred
4904 pops are inserted into this sequence, and not into some random
4905 location in the instruction stream. See INHIBIT_DEFER_POP for more
4906 information about deferred popping of arguments. */
4908 void
4909 end_sequence (void)
4911 struct sequence_stack *tem = seq_stack;
4913 first_insn = tem->first;
4914 last_insn = tem->last;
4915 seq_stack = tem->next;
4917 memset (tem, 0, sizeof (*tem));
4918 tem->next = free_sequence_stack;
4919 free_sequence_stack = tem;
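
/* Illustrative sketch (not part of the original file): the canonical way
   to use the sequence stack, following the comments above -- get_insns
   must be called before end_sequence, and do_pending_stack_adjust should
   be called beforehand if argument pops might still be deferred.  The
   helper name is an assumption made for the example.  */

static rtx
example_emit_copy_before (rtx dest, rtx src, rtx where)
{
  rtx seq;

  start_sequence ();
  emit_move_insn (dest, src);
  seq = get_insns ();
  end_sequence ();

  /* Splice the detached chain into the main insn stream.  */
  return emit_insn_before (seq, where);
}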
4922 /* Return 1 if currently emitting into a sequence. */
4925 in_sequence_p (void)
4927 return seq_stack != 0;
4930 /* Put the various virtual registers into REGNO_REG_RTX. */
4932 void
4933 init_virtual_regs (struct emit_status *es)
4935 rtx *ptr = es->x_regno_reg_rtx;
4936 ptr[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
4937 ptr[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
4938 ptr[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
4939 ptr[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
4940 ptr[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
4944 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
4945 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
4946 static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
4947 static int copy_insn_n_scratches;
4949 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
4950 copied an ASM_OPERANDS.
4951 In that case, it is the original input-operand vector. */
4952 static rtvec orig_asm_operands_vector;
4954 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
4955 copied an ASM_OPERANDS.
4956 In that case, it is the copied input-operand vector. */
4957 static rtvec copy_asm_operands_vector;
4959 /* Likewise for the constraints vector. */
4960 static rtvec orig_asm_constraints_vector;
4961 static rtvec copy_asm_constraints_vector;
4963 /* Recursively create a new copy of an rtx for copy_insn.
4964 This function differs from copy_rtx in that it handles SCRATCHes and
4965 ASM_OPERANDs properly.
4966 Normally, this function is not used directly; use copy_insn as front end.
4967 However, you could first copy an insn pattern with copy_insn and then use
4968 this function afterwards to properly copy any REG_NOTEs containing
4969 SCRATCHes. */
4972 copy_insn_1 (rtx orig)
4974 rtx copy;
4975 int i, j;
4976 RTX_CODE code;
4977 const char *format_ptr;
4979 code = GET_CODE (orig);
4981 switch (code)
4983 case REG:
4984 case CONST_INT:
4985 case CONST_DOUBLE:
4986 case CONST_VECTOR:
4987 case SYMBOL_REF:
4988 case CODE_LABEL:
4989 case PC:
4990 case CC0:
4991 return orig;
4992 case CLOBBER:
4993 if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER)
4994 return orig;
4995 break;
4997 case SCRATCH:
4998 for (i = 0; i < copy_insn_n_scratches; i++)
4999 if (copy_insn_scratch_in[i] == orig)
5000 return copy_insn_scratch_out[i];
5001 break;
5003 case CONST:
5004 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
5005 a LABEL_REF, it isn't sharable. */
5006 if (GET_CODE (XEXP (orig, 0)) == PLUS
5007 && GET_CODE (XEXP (XEXP (orig, 0), 0)) == SYMBOL_REF
5008 && GET_CODE (XEXP (XEXP (orig, 0), 1)) == CONST_INT)
5009 return orig;
5010 break;
5012 /* A MEM with a constant address is not sharable. The problem is that
5013 the constant address may need to be reloaded. If the mem is shared,
5014 then reloading one copy of this mem will cause all copies to appear
5015 to have been reloaded. */
5017 default:
5018 break;
5021 copy = rtx_alloc (code);
5023 /* Copy the various flags, and other information. We assume that
5024 all fields need copying, and then clear the fields that should
5025 not be copied. That is the sensible default behavior, and forces
5026 us to explicitly document why we are *not* copying a flag. */
5027 memcpy (copy, orig, RTX_HDR_SIZE);
5029 /* We do not copy the USED flag, which is used as a mark bit during
5030 walks over the RTL. */
5031 RTX_FLAG (copy, used) = 0;
5033 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
5034 if (INSN_P (orig))
5036 RTX_FLAG (copy, jump) = 0;
5037 RTX_FLAG (copy, call) = 0;
5038 RTX_FLAG (copy, frame_related) = 0;
5041 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
5043 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
5045 copy->u.fld[i] = orig->u.fld[i];
5046 switch (*format_ptr++)
5048 case 'e':
5049 if (XEXP (orig, i) != NULL)
5050 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
5051 break;
5053 case 'E':
5054 case 'V':
5055 if (XVEC (orig, i) == orig_asm_constraints_vector)
5056 XVEC (copy, i) = copy_asm_constraints_vector;
5057 else if (XVEC (orig, i) == orig_asm_operands_vector)
5058 XVEC (copy, i) = copy_asm_operands_vector;
5059 else if (XVEC (orig, i) != NULL)
5061 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
5062 for (j = 0; j < XVECLEN (copy, i); j++)
5063 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
5065 break;
5067 case 't':
5068 case 'w':
5069 case 'i':
5070 case 's':
5071 case 'S':
5072 case 'u':
5073 case '0':
5074 /* These are left unchanged. */
5075 break;
5077 default:
5078 gcc_unreachable ();
5082 if (code == SCRATCH)
5084 i = copy_insn_n_scratches++;
5085 gcc_assert (i < MAX_RECOG_OPERANDS);
5086 copy_insn_scratch_in[i] = orig;
5087 copy_insn_scratch_out[i] = copy;
5089 else if (code == ASM_OPERANDS)
5091 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
5092 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
5093 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
5094 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
5097 return copy;
5100 /* Create a new copy of an rtx.
5101 This function differs from copy_rtx in that it handles SCRATCHes and
5102 ASM_OPERANDs properly.
5103 INSN doesn't really have to be a full INSN; it could be just the
5104 pattern. */
5106 copy_insn (rtx insn)
5108 copy_insn_n_scratches = 0;
5109 orig_asm_operands_vector = 0;
5110 orig_asm_constraints_vector = 0;
5111 copy_asm_operands_vector = 0;
5112 copy_asm_constraints_vector = 0;
5113 return copy_insn_1 (insn);
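
/* Illustrative sketch (not part of the original file): copy_insn is the
   preferred way to duplicate a pattern for re-emission, since unlike
   copy_rtx it gives the copy fresh SCRATCHes (shared consistently within
   the one pattern) and keeps ASM_OPERANDS vectors shared.  The helper
   name is an assumption made for the example.  */

static rtx
example_duplicate_after (rtx insn, rtx after)
{
  return emit_insn_after (copy_insn (PATTERN (insn)), after);
}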
5116 /* Initialize data structures and variables in this file
5117 before generating rtl for each function. */
5119 void
5120 init_emit (void)
5122 struct function *f = cfun;
5124 f->emit = ggc_alloc (sizeof (struct emit_status));
5125 first_insn = NULL;
5126 last_insn = NULL;
5127 cur_insn_uid = 1;
5128 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
5129 last_location = UNKNOWN_LOCATION;
5130 first_label_num = label_num;
5131 seq_stack = NULL;
5133 /* Init the tables that describe all the pseudo regs. */
5135 f->emit->regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
5137 f->emit->regno_pointer_align
5138 = ggc_alloc_cleared (f->emit->regno_pointer_align_length
5139 * sizeof (unsigned char));
5141 regno_reg_rtx
5142 = ggc_alloc (f->emit->regno_pointer_align_length * sizeof (rtx));
5144 /* Put copies of all the hard registers into regno_reg_rtx. */
5145 memcpy (regno_reg_rtx,
5146 static_regno_reg_rtx,
5147 FIRST_PSEUDO_REGISTER * sizeof (rtx));
5149 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
5150 init_virtual_regs (f->emit);
5152 /* Indicate that the virtual registers and stack locations are
5153 all pointers. */
5154 REG_POINTER (stack_pointer_rtx) = 1;
5155 REG_POINTER (frame_pointer_rtx) = 1;
5156 REG_POINTER (hard_frame_pointer_rtx) = 1;
5157 REG_POINTER (arg_pointer_rtx) = 1;
5159 REG_POINTER (virtual_incoming_args_rtx) = 1;
5160 REG_POINTER (virtual_stack_vars_rtx) = 1;
5161 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5162 REG_POINTER (virtual_outgoing_args_rtx) = 1;
5163 REG_POINTER (virtual_cfa_rtx) = 1;
5165 #ifdef STACK_BOUNDARY
5166 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5167 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5168 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5169 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5171 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5172 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5173 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5174 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5175 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
5176 #endif
5178 #ifdef INIT_EXPANDERS
5179 INIT_EXPANDERS;
5180 #endif
5183 /* Generate a vector constant for mode MODE and constant value CONSTANT. */
5185 static rtx
5186 gen_const_vector (enum machine_mode mode, int constant)
5188 rtx tem;
5189 rtvec v;
5190 int units, i;
5191 enum machine_mode inner;
5193 units = GET_MODE_NUNITS (mode);
5194 inner = GET_MODE_INNER (mode);
5196 v = rtvec_alloc (units);
5198 /* We need to call this function after we set the scalar const_tiny_rtx
5199 entries. */
5200 gcc_assert (const_tiny_rtx[constant][(int) inner]);
5202 for (i = 0; i < units; ++i)
5203 RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];
5205 tem = gen_rtx_raw_CONST_VECTOR (mode, v);
5206 return tem;
5209 /* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the zero vector when
5210 all elements are zero, and the one vector when all elements are one. */
5212 gen_rtx_CONST_VECTOR (enum machine_mode mode, rtvec v)
5214 enum machine_mode inner = GET_MODE_INNER (mode);
5215 int nunits = GET_MODE_NUNITS (mode);
5216 rtx x;
5217 int i;
5219 /* Check to see if all of the elements have the same value. */
5220 x = RTVEC_ELT (v, nunits - 1);
5221 for (i = nunits - 2; i >= 0; i--)
5222 if (RTVEC_ELT (v, i) != x)
5223 break;
5225 /* If the values are all the same, check to see if we can use one of the
5226 standard constant vectors. */
5227 if (i == -1)
5229 if (x == CONST0_RTX (inner))
5230 return CONST0_RTX (mode);
5231 else if (x == CONST1_RTX (inner))
5232 return CONST1_RTX (mode);
5235 return gen_rtx_raw_CONST_VECTOR (mode, v);
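
/* Illustrative sketch (not part of the original file): building a constant
   vector element by element.  When every element is the zero (or one)
   constant of the inner mode, gen_rtx_CONST_VECTOR returns the shared
   CONST0_RTX (or CONST1_RTX) object instead of allocating a new
   CONST_VECTOR.  MODE is assumed to be a vector mode the target supports;
   the helper name is an assumption made for the example.  */

static rtx
example_splat_vector (enum machine_mode mode, rtx elt)
{
  int i, units = GET_MODE_NUNITS (mode);
  rtvec v = rtvec_alloc (units);

  for (i = 0; i < units; i++)
    RTVEC_ELT (v, i) = elt;

  return gen_rtx_CONST_VECTOR (mode, v);
}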
5238 /* Create some permanent unique rtl objects shared between all functions.
5239 LINE_NUMBERS is nonzero if line numbers are to be generated. */
5241 void
5242 init_emit_once (int line_numbers)
5244 int i;
5245 enum machine_mode mode;
5246 enum machine_mode double_mode;
5248 /* We need reg_raw_mode, so initialize the modes now. */
5249 init_reg_modes_once ();
5251 /* Initialize the CONST_INT, CONST_DOUBLE, memory attribute, and register
5252 attribute hash tables. */
5253 const_int_htab = htab_create_ggc (37, const_int_htab_hash,
5254 const_int_htab_eq, NULL);
5256 const_double_htab = htab_create_ggc (37, const_double_htab_hash,
5257 const_double_htab_eq, NULL);
5259 mem_attrs_htab = htab_create_ggc (37, mem_attrs_htab_hash,
5260 mem_attrs_htab_eq, NULL);
5261 reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash,
5262 reg_attrs_htab_eq, NULL);
5264 no_line_numbers = ! line_numbers;
5266 /* Compute the byte, word and double modes. */
5268 byte_mode = VOIDmode;
5269 word_mode = VOIDmode;
5270 double_mode = VOIDmode;
5272 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
5273 mode = GET_MODE_WIDER_MODE (mode))
5275 if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
5276 && byte_mode == VOIDmode)
5277 byte_mode = mode;
5279 if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
5280 && word_mode == VOIDmode)
5281 word_mode = mode;
5284 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
5285 mode = GET_MODE_WIDER_MODE (mode))
5287 if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
5288 && double_mode == VOIDmode)
5289 double_mode = mode;
5292 ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
5294 /* Assign register numbers to the globally defined register rtx.
5295 This must be done at runtime because the register number field
5296 is in a union and some compilers can't initialize unions. */
5298 pc_rtx = gen_rtx_PC (VOIDmode);
5299 cc0_rtx = gen_rtx_CC0 (VOIDmode);
5300 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
5301 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
5302 if (hard_frame_pointer_rtx == 0)
5303 hard_frame_pointer_rtx = gen_raw_REG (Pmode,
5304 HARD_FRAME_POINTER_REGNUM);
5305 if (arg_pointer_rtx == 0)
5306 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
5307 virtual_incoming_args_rtx =
5308 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
5309 virtual_stack_vars_rtx =
5310 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
5311 virtual_stack_dynamic_rtx =
5312 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
5313 virtual_outgoing_args_rtx =
5314 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
5315 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
5317 /* Initialize RTL for commonly used hard registers. These are
5318 copied into regno_reg_rtx as we begin to compile each function. */
5319 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5320 static_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
5322 #ifdef INIT_EXPANDERS
5323 /* This is to initialize {init|mark|free}_machine_status before the first
5324 call to push_function_context_to. This is needed by the Chill front
5325 end which calls push_function_context_to before the first call to
5326 init_function_start. */
5327 INIT_EXPANDERS;
5328 #endif
5330 /* Create the unique rtx's for certain rtx codes and operand values. */
5332 /* Don't use gen_rtx_CONST_INT here, since gen_rtx_CONST_INT would try to
5333 return entries from this very table before it is initialized. */
5334 for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
5335 const_int_rtx[i + MAX_SAVED_CONST_INT] =
5336 gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);
5338 if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
5339 && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
5340 const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
5341 else
5342 const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
5344 REAL_VALUE_FROM_INT (dconst0, 0, 0, double_mode);
5345 REAL_VALUE_FROM_INT (dconst1, 1, 0, double_mode);
5346 REAL_VALUE_FROM_INT (dconst2, 2, 0, double_mode);
5347 REAL_VALUE_FROM_INT (dconst3, 3, 0, double_mode);
5348 REAL_VALUE_FROM_INT (dconst10, 10, 0, double_mode);
5349 REAL_VALUE_FROM_INT (dconstm1, -1, -1, double_mode);
5350 REAL_VALUE_FROM_INT (dconstm2, -2, -1, double_mode);
5352 dconsthalf = dconst1;
5353 SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);
5355 real_arithmetic (&dconstthird, RDIV_EXPR, &dconst1, &dconst3);
5357 /* Initialize mathematical constants for constant folding builtins.
5358 These constants need to be given to at least 160 bits of precision. */
5359 real_from_string (&dconstpi,
5360 "3.1415926535897932384626433832795028841971693993751058209749445923078");
5361 real_from_string (&dconste,
5362 "2.7182818284590452353602874713526624977572470936999595749669676277241");
5364 for (i = 0; i < (int) ARRAY_SIZE (const_tiny_rtx); i++)
5366 REAL_VALUE_TYPE *r =
5367 (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
5369 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
5370 mode = GET_MODE_WIDER_MODE (mode))
5371 const_tiny_rtx[i][(int) mode] =
5372 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5374 const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
5376 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
5377 mode = GET_MODE_WIDER_MODE (mode))
5378 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5380 for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT);
5381 mode != VOIDmode;
5382 mode = GET_MODE_WIDER_MODE (mode))
5383 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5386 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
5387 mode != VOIDmode;
5388 mode = GET_MODE_WIDER_MODE (mode))
5390 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5391 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5394 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
5395 mode != VOIDmode;
5396 mode = GET_MODE_WIDER_MODE (mode))
5398 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5399 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5402 for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
5403 if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
5404 const_tiny_rtx[0][i] = const0_rtx;
5406 const_tiny_rtx[0][(int) BImode] = const0_rtx;
5407 if (STORE_FLAG_VALUE == 1)
5408 const_tiny_rtx[1][(int) BImode] = const1_rtx;
5410 #ifdef RETURN_ADDRESS_POINTER_REGNUM
5411 return_address_pointer_rtx
5412 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
5413 #endif
5415 #ifdef STATIC_CHAIN_REGNUM
5416 static_chain_rtx = gen_rtx_REG (Pmode, STATIC_CHAIN_REGNUM);
5418 #ifdef STATIC_CHAIN_INCOMING_REGNUM
5419 if (STATIC_CHAIN_INCOMING_REGNUM != STATIC_CHAIN_REGNUM)
5420 static_chain_incoming_rtx
5421 = gen_rtx_REG (Pmode, STATIC_CHAIN_INCOMING_REGNUM);
5422 else
5423 #endif
5424 static_chain_incoming_rtx = static_chain_rtx;
5425 #endif
5427 #ifdef STATIC_CHAIN
5428 static_chain_rtx = STATIC_CHAIN;
5430 #ifdef STATIC_CHAIN_INCOMING
5431 static_chain_incoming_rtx = STATIC_CHAIN_INCOMING;
5432 #else
5433 static_chain_incoming_rtx = static_chain_rtx;
5434 #endif
5435 #endif
5437 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
5438 pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
5441 /* Produce an exact duplicate of insn INSN after AFTER.
5442 Take care to update libcall regions if present. */
5445 emit_copy_of_insn_after (rtx insn, rtx after)
5447 rtx new;
5448 rtx note1, note2, link;
5450 switch (GET_CODE (insn))
5452 case INSN:
5453 new = emit_insn_after (copy_insn (PATTERN (insn)), after);
5454 break;
5456 case JUMP_INSN:
5457 new = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
5458 break;
5460 case CALL_INSN:
5461 new = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
5462 if (CALL_INSN_FUNCTION_USAGE (insn))
5463 CALL_INSN_FUNCTION_USAGE (new)
5464 = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
5465 SIBLING_CALL_P (new) = SIBLING_CALL_P (insn);
5466 CONST_OR_PURE_CALL_P (new) = CONST_OR_PURE_CALL_P (insn);
5467 break;
5469 default:
5470 gcc_unreachable ();
5473 /* Update LABEL_NUSES. */
5474 mark_jump_label (PATTERN (new), new, 0);
5476 INSN_LOCATOR (new) = INSN_LOCATOR (insn);
5478 /* If the old insn is frame related, then so is the new one. This is
5479 primarily needed for IA-64 unwind info which marks epilogue insns,
5480 which may be duplicated by the basic block reordering code. */
5481 RTX_FRAME_RELATED_P (new) = RTX_FRAME_RELATED_P (insn);
5483 /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
5484 make them. */
5485 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
5486 if (REG_NOTE_KIND (link) != REG_LABEL)
5488 if (GET_CODE (link) == EXPR_LIST)
5489 REG_NOTES (new)
5490 = copy_insn_1 (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link),
5491 XEXP (link, 0),
5492 REG_NOTES (new)));
5493 else
5494 REG_NOTES (new)
5495 = copy_insn_1 (gen_rtx_INSN_LIST (REG_NOTE_KIND (link),
5496 XEXP (link, 0),
5497 REG_NOTES (new)));
5500 /* Fix the libcall sequences. */
5501 if ((note1 = find_reg_note (new, REG_RETVAL, NULL_RTX)) != NULL)
5503 rtx p = new;
5504 while ((note2 = find_reg_note (p, REG_LIBCALL, NULL_RTX)) == NULL)
5505 p = PREV_INSN (p);
5506 XEXP (note1, 0) = p;
5507 XEXP (note2, 0) = new;
5509 INSN_CODE (new) = INSN_CODE (insn);
5510 return new;
5513 static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
5515 gen_hard_reg_clobber (enum machine_mode mode, unsigned int regno)
5517 if (hard_reg_clobbers[mode][regno])
5518 return hard_reg_clobbers[mode][regno];
5519 else
5520 return (hard_reg_clobbers[mode][regno] =
5521 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
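
/* Illustrative sketch (not part of the original file): because of the
   cache above, repeated clobbers of the same hard register in the same
   mode share a single rtx.  The use of word_mode and the helper name are
   assumptions made for the example.  */

static rtx
example_clobber_hard_reg (unsigned int regno)
{
  return emit_insn (gen_hard_reg_clobber (word_mode, regno));
}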
5524 #include "gt-emit-rtl.h"