/* Emit RTL for the GCC expander.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/* Middle-to-low level generation of rtx code and insns.

   This file contains support functions for creating rtl expressions
   and manipulating them in the doubly-linked chain of insns.

   The patterns of the insns are created by machine-dependent
   routines in insn-emit.c, which is generated automatically from
   the machine description.  These routines make the individual rtx's
   of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
   which are automatically generated from rtl.def; what is machine
   dependent is the kind of rtx's they make and what arguments they
   use.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "hashtab.h"
#include "insn-config.h"
#include "recog.h"
#include "real.h"
#include "bitmap.h"
#include "basic-block.h"
#include "ggc.h"
#include "debug.h"
#include "langhooks.h"
/* Commonly used modes.  */

enum machine_mode byte_mode;    /* Mode whose width is BITS_PER_UNIT.  */
enum machine_mode word_mode;    /* Mode whose width is BITS_PER_WORD.  */
enum machine_mode double_mode;  /* Mode whose width is DOUBLE_TYPE_SIZE.  */
enum machine_mode ptr_mode;     /* Mode whose width is POINTER_SIZE.  */

/* This is *not* reset after each function.  It gives each CODE_LABEL
   in the entire compilation a unique label number.  */

static GTY(()) int label_num = 1;

/* Highest label number in current function.
   Zero means use the value of label_num instead.
   This is nonzero only when belatedly compiling an inline function.  */

static int last_label_num;

/* Value label_num had when set_new_last_label_num was called.
   If label_num has not changed since then, last_label_num is valid.  */

static int base_label_num;

/* Nonzero means do not generate NOTEs for source line numbers.  */

static int no_line_numbers;
/* Commonly used rtx's, so that we only need space for one copy.
   These are initialized once for the entire compilation.
   All of these are unique; no other rtx-object will be equal to any
   of these.  */

rtx global_rtl[GR_MAX];

/* Commonly used RTL for hard registers.  These objects are not necessarily
   unique, so we allocate them separately from global_rtl.  They are
   initialized once per compilation unit, then copied into regno_reg_rtx
   at the beginning of each function.  */
static GTY(()) rtx static_regno_reg_rtx[FIRST_PSEUDO_REGISTER];

/* We record floating-point CONST_DOUBLEs in each floating-point mode for
   the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
   record a copy of const[012]_rtx.  */

rtx const_tiny_rtx[3][(int) MAX_MACHINE_MODE];

rtx const_true_rtx;

REAL_VALUE_TYPE dconst0;
REAL_VALUE_TYPE dconst1;
REAL_VALUE_TYPE dconst2;
REAL_VALUE_TYPE dconst3;
REAL_VALUE_TYPE dconst10;
REAL_VALUE_TYPE dconstm1;
REAL_VALUE_TYPE dconstm2;
REAL_VALUE_TYPE dconsthalf;
REAL_VALUE_TYPE dconstthird;
REAL_VALUE_TYPE dconstpi;
REAL_VALUE_TYPE dconste;
/* All references to the following fixed hard registers go through
   these unique rtl objects.  On machines where the frame-pointer and
   arg-pointer are the same register, they use the same unique object.

   After register allocation, other rtl objects which used to be pseudo-regs
   may be clobbered to refer to the frame-pointer register.
   But references that were originally to the frame-pointer can be
   distinguished from the others because they contain frame_pointer_rtx.

   When to use frame_pointer_rtx and hard_frame_pointer_rtx is a little
   tricky: until register elimination has taken place hard_frame_pointer_rtx
   should be used if it is being set, and frame_pointer_rtx otherwise.  After
   register elimination hard_frame_pointer_rtx should always be used.
   On machines where the two registers are the same (most machines), the
   two rtxs are likewise the same.

   In an inline procedure, the stack and frame pointer rtxs may not be
   used for anything else.  */
rtx static_chain_rtx;           /* (REG:Pmode STATIC_CHAIN_REGNUM) */
rtx static_chain_incoming_rtx;  /* (REG:Pmode STATIC_CHAIN_INCOMING_REGNUM) */
rtx pic_offset_table_rtx;       /* (REG:Pmode PIC_OFFSET_TABLE_REGNUM) */

/* This is used to implement __builtin_return_address for some machines.
   See for instance the MIPS port.  */
rtx return_address_pointer_rtx; /* (REG:Pmode RETURN_ADDRESS_POINTER_REGNUM) */
/* We make one copy of (const_int C) where C is in
   [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
   to save space during the compilation and simplify comparisons of
   integers.  */

rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];

/* A hash table storing CONST_INTs whose absolute value is greater
   than MAX_SAVED_CONST_INT.  */

static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_int_htab;
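
/* Illustrative sketch, not part of the original file: because small
   CONST_INTs are pooled in const_int_rtx and larger ones are interned
   in const_int_htab, equal integer constants share one rtx object and
   can be compared by pointer.  A minimal demonstration, assuming 42 is
   within MAX_SAVED_CONST_INT.  */
#if 0
static void
example_const_int_sharing (void)
{
  rtx a = GEN_INT (42);
  rtx b = GEN_INT (42);

  if (a != b)
    abort ();                   /* Never happens: both map to one rtx.  */
  if (GEN_INT (0) != const0_rtx)
    abort ();                   /* const0_rtx lives in const_int_rtx[].  */
}
#endif
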
/* A hash table storing memory attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct mem_attrs)))
     htab_t mem_attrs_htab;

/* A hash table storing register attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs)))
     htab_t reg_attrs_htab;

/* A hash table storing all CONST_DOUBLEs.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_double_htab;

#define first_insn (cfun->emit->x_first_insn)
#define last_insn (cfun->emit->x_last_insn)
#define cur_insn_uid (cfun->emit->x_cur_insn_uid)
#define last_location (cfun->emit->x_last_location)
#define first_label_num (cfun->emit->x_first_label_num)

static rtx make_jump_insn_raw (rtx);
static rtx make_call_insn_raw (rtx);
static rtx find_line_note (rtx);
static rtx change_address_1 (rtx, enum machine_mode, rtx, int);
static void unshare_all_decls (tree);
static void reset_used_decls (tree);
static void mark_label_nuses (rtx);
static hashval_t const_int_htab_hash (const void *);
static int const_int_htab_eq (const void *, const void *);
static hashval_t const_double_htab_hash (const void *);
static int const_double_htab_eq (const void *, const void *);
static rtx lookup_const_double (rtx);
static hashval_t mem_attrs_htab_hash (const void *);
static int mem_attrs_htab_eq (const void *, const void *);
static mem_attrs *get_mem_attrs (HOST_WIDE_INT, tree, rtx, rtx, unsigned int,
                                 enum machine_mode);
static hashval_t reg_attrs_htab_hash (const void *);
static int reg_attrs_htab_eq (const void *, const void *);
static reg_attrs *get_reg_attrs (tree, int);
static tree component_ref_for_mem_expr (tree);
static rtx gen_const_vector_0 (enum machine_mode);
static rtx gen_complex_constant_part (enum machine_mode, rtx, int);
static void copy_rtx_if_shared_1 (rtx *orig);

/* Probability of the conditional branch currently processed by try_split.
   Set to -1 otherwise.  */
int split_branch_probability = -1;
/* Returns a hash code for X (which is really a CONST_INT).  */

static hashval_t
const_int_htab_hash (const void *x)
{
  return (hashval_t) INTVAL ((rtx) x);
}

/* Returns nonzero if the value represented by X (which is really a
   CONST_INT) is the same as that given by Y (which is really a
   HOST_WIDE_INT *).  */

static int
const_int_htab_eq (const void *x, const void *y)
{
  return (INTVAL ((rtx) x) == *((const HOST_WIDE_INT *) y));
}

/* Returns a hash code for X (which is really a CONST_DOUBLE).  */
static hashval_t
const_double_htab_hash (const void *x)
{
  rtx value = (rtx) x;
  hashval_t h;

  if (GET_MODE (value) == VOIDmode)
    h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
  else
    {
      h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
      /* MODE is used in the comparison, so it should be in the hash.  */
      h ^= GET_MODE (value);
    }
  return h;
}

/* Returns nonzero if the value represented by X (really a CONST_DOUBLE)
   is the same as that represented by Y (really a CONST_DOUBLE).  */
static int
const_double_htab_eq (const void *x, const void *y)
{
  rtx a = (rtx)x, b = (rtx)y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  if (GET_MODE (a) == VOIDmode)
    return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
            && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
  else
    return real_identical (CONST_DOUBLE_REAL_VALUE (a),
                           CONST_DOUBLE_REAL_VALUE (b));
}
/* Returns a hash code for X (which is really a mem_attrs *).  */

static hashval_t
mem_attrs_htab_hash (const void *x)
{
  mem_attrs *p = (mem_attrs *) x;

  return (p->alias ^ (p->align * 1000)
          ^ ((p->offset ? INTVAL (p->offset) : 0) * 50000)
          ^ ((p->size ? INTVAL (p->size) : 0) * 2500000)
          ^ (size_t) p->expr);
}

/* Returns nonzero if the value represented by X (which is really a
   mem_attrs *) is the same as that given by Y (which is also really a
   mem_attrs *).  */

static int
mem_attrs_htab_eq (const void *x, const void *y)
{
  mem_attrs *p = (mem_attrs *) x;
  mem_attrs *q = (mem_attrs *) y;

  return (p->alias == q->alias && p->expr == q->expr && p->offset == q->offset
          && p->size == q->size && p->align == q->align);
}

/* Allocate a new mem_attrs structure and insert it into the hash table if
   one identical to it is not already in the table.  We are doing this for
   a MEM of mode MODE.  */

static mem_attrs *
get_mem_attrs (HOST_WIDE_INT alias, tree expr, rtx offset, rtx size,
               unsigned int align, enum machine_mode mode)
{
  mem_attrs attrs;
  void **slot;

  /* If everything is the default, we can just return zero.
     This must match what the corresponding MEM_* macros return when the
     field is not present.  */
  if (alias == 0 && expr == 0 && offset == 0
      && (size == 0
          || (mode != BLKmode && GET_MODE_SIZE (mode) == INTVAL (size)))
      && (STRICT_ALIGNMENT && mode != BLKmode
          ? align == GET_MODE_ALIGNMENT (mode) : align == BITS_PER_UNIT))
    return 0;

  attrs.alias = alias;
  attrs.expr = expr;
  attrs.offset = offset;
  attrs.size = size;
  attrs.align = align;

  slot = htab_find_slot (mem_attrs_htab, &attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc (sizeof (mem_attrs));
      memcpy (*slot, &attrs, sizeof (mem_attrs));
    }

  return *slot;
}
/* Returns a hash code for X (which is really a reg_attrs *).  */

static hashval_t
reg_attrs_htab_hash (const void *x)
{
  reg_attrs *p = (reg_attrs *) x;

  return ((p->offset * 1000) ^ (long) p->decl);
}

/* Returns nonzero if the value represented by X (which is really a
   reg_attrs *) is the same as that given by Y (which is also really a
   reg_attrs *).  */

static int
reg_attrs_htab_eq (const void *x, const void *y)
{
  reg_attrs *p = (reg_attrs *) x;
  reg_attrs *q = (reg_attrs *) y;

  return (p->decl == q->decl && p->offset == q->offset);
}

/* Allocate a new reg_attrs structure and insert it into the hash table if
   one identical to it is not already in the table.  We are doing this for
   a REG with decl DECL and offset OFFSET.  */

static reg_attrs *
get_reg_attrs (tree decl, int offset)
{
  reg_attrs attrs;
  void **slot;

  /* If everything is the default, we can just return zero.  */
  if (decl == 0 && offset == 0)
    return 0;

  attrs.decl = decl;
  attrs.offset = offset;

  slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc (sizeof (reg_attrs));
      memcpy (*slot, &attrs, sizeof (reg_attrs));
    }

  return *slot;
}
/* Generate a new REG rtx.  Make sure ORIGINAL_REGNO is set properly, and
   don't attempt to share with the various global pieces of rtl (such as
   frame_pointer_rtx).  */

rtx
gen_raw_REG (enum machine_mode mode, int regno)
{
  rtx x = gen_rtx_raw_REG (mode, regno);
  ORIGINAL_REGNO (x) = regno;
  return x;
}

/* There are some RTL codes that require special attention; the generation
   functions do the raw handling.  If you add to this list, modify
   special_rtx in gengenrtl.c as well.  */

rtx
gen_rtx_CONST_INT (enum machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
{
  void **slot;

  if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
    return const_int_rtx[arg + MAX_SAVED_CONST_INT];

#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
  if (const_true_rtx && arg == STORE_FLAG_VALUE)
    return const_true_rtx;
#endif

  /* Look up the CONST_INT in the hash table.  */
  slot = htab_find_slot_with_hash (const_int_htab, &arg,
                                   (hashval_t) arg, INSERT);
  if (*slot == 0)
    *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);

  return (rtx) *slot;
}

rtx
gen_int_mode (HOST_WIDE_INT c, enum machine_mode mode)
{
  return GEN_INT (trunc_int_for_mode (c, mode));
}
/* CONST_DOUBLEs might be created from pairs of integers, or from
   REAL_VALUE_TYPEs.  Also, their length is known only at run time,
   so we cannot use gen_rtx_raw_CONST_DOUBLE.  */

/* Determine whether REAL, a CONST_DOUBLE, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */
static rtx
lookup_const_double (rtx real)
{
  void **slot = htab_find_slot (const_double_htab, real, INSERT);
  if (*slot == 0)
    *slot = real;

  return (rtx) *slot;
}

/* Return a CONST_DOUBLE rtx for a floating-point value specified by
   VALUE in mode MODE.  */

rtx
const_double_from_real_value (REAL_VALUE_TYPE value, enum machine_mode mode)
{
  rtx real = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (real, mode);

  memcpy (&CONST_DOUBLE_LOW (real), &value, sizeof (REAL_VALUE_TYPE));

  return lookup_const_double (real);
}

/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
   of ints: I0 is the low-order word and I1 is the high-order word.
   Do not use this routine for non-integer modes; convert to
   REAL_VALUE_TYPE and use CONST_DOUBLE_FROM_REAL_VALUE.  */

rtx
immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, enum machine_mode mode)
{
  rtx value;
  unsigned int i;

  if (mode != VOIDmode)
    {
      int width;
      if (GET_MODE_CLASS (mode) != MODE_INT
          && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT
          /* We can get a 0 for an error mark.  */
          && GET_MODE_CLASS (mode) != MODE_VECTOR_INT
          && GET_MODE_CLASS (mode) != MODE_VECTOR_FLOAT)
        abort ();

      /* We clear out all bits that don't belong in MODE, unless they and
         our sign bit are all one.  So we get either a reasonable negative
         value or a reasonable unsigned value for this mode.  */
      width = GET_MODE_BITSIZE (mode);
      if (width < HOST_BITS_PER_WIDE_INT
          && ((i0 & ((HOST_WIDE_INT) (-1) << (width - 1)))
              != ((HOST_WIDE_INT) (-1) << (width - 1))))
        i0 &= ((HOST_WIDE_INT) 1 << width) - 1, i1 = 0;
      else if (width == HOST_BITS_PER_WIDE_INT
               && ! (i1 == ~0 && i0 < 0))
        i1 = 0;
      else if (width > 2 * HOST_BITS_PER_WIDE_INT)
        /* We cannot represent this value as a constant.  */
        abort ();

      /* If this would be an entire word for the target, but is not for
         the host, then sign-extend on the host so that the number will
         look the same way on the host that it would on the target.

         For example, when building a 64 bit alpha hosted 32 bit sparc
         targeted compiler, then we want the 32 bit unsigned value -1 to be
         represented as a 64 bit value -1, and not as 0x00000000ffffffff.
         The latter confuses the sparc backend.  */

      if (width < HOST_BITS_PER_WIDE_INT
          && (i0 & ((HOST_WIDE_INT) 1 << (width - 1))))
        i0 |= ((HOST_WIDE_INT) (-1) << width);

      /* If MODE fits within HOST_BITS_PER_WIDE_INT, always use a
         CONST_INT.

         ??? Strictly speaking, this is wrong if we create a CONST_INT for
         a large unsigned constant with the size of MODE being
         HOST_BITS_PER_WIDE_INT and later try to interpret that constant
         in a wider mode.  In that case we will mis-interpret it as a
         negative number.

         Unfortunately, the only alternative is to make a CONST_DOUBLE for
         any constant in any mode if it is an unsigned constant larger
         than the maximum signed integer in an int on the host.  However,
         doing this will break everyone that always expects to see a
         CONST_INT for SImode and smaller.

         We have always been making CONST_INTs in this case, so nothing
         new is being broken.  */

      if (width <= HOST_BITS_PER_WIDE_INT)
        i1 = (i0 < 0) ? ~(HOST_WIDE_INT) 0 : 0;
    }

  /* If this integer fits in one word, return a CONST_INT.  */
  if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
    return GEN_INT (i0);

  /* We use VOIDmode for integers.  */
  value = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (value, VOIDmode);

  CONST_DOUBLE_LOW (value) = i0;
  CONST_DOUBLE_HIGH (value) = i1;

  for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
    XWINT (value, i) = 0;

  return lookup_const_double (value);
}
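
/* Illustrative sketch, not part of the original file: on a host with
   32-bit HOST_WIDE_INT, a DImode value whose high word is not just the
   sign extension of the low word needs a VOIDmode CONST_DOUBLE, while a
   value that does sign-extend comes back as a plain CONST_INT.  */
#if 0
static void
example_immed_double_const (void)
{
  /* Low word 0, high word 1 (0x0000000100000000): two words needed.  */
  rtx big = immed_double_const (0, 1, DImode);
  /* -1 sign-extends from the low word, so a CONST_INT suffices.  */
  rtx small = immed_double_const (-1, -1, DImode);

  if (GET_CODE (big) != CONST_DOUBLE || GET_CODE (small) != CONST_INT)
    abort ();
}
#endif
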
rtx
gen_rtx_REG (enum machine_mode mode, unsigned int regno)
{
  /* In case the MD file explicitly references the frame pointer, have
     all such references point to the same frame pointer.  This is
     used during frame pointer elimination to distinguish the explicit
     references to these registers from pseudos that happened to be
     assigned to them.

     If we have eliminated the frame pointer or arg pointer, we will
     be using it as a normal register, for example as a spill
     register.  In such cases, we might be accessing it in a mode that
     is not Pmode and therefore cannot use the pre-allocated rtx.

     Also don't do this when we are making new REGs in reload, since
     we don't want to get confused with the real pointers.  */

  if (mode == Pmode && !reload_in_progress)
    {
      if (regno == FRAME_POINTER_REGNUM
          && (!reload_completed || frame_pointer_needed))
        return frame_pointer_rtx;
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
      if (regno == HARD_FRAME_POINTER_REGNUM
          && (!reload_completed || frame_pointer_needed))
        return hard_frame_pointer_rtx;
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && HARD_FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
      if (regno == ARG_POINTER_REGNUM)
        return arg_pointer_rtx;
#endif
#ifdef RETURN_ADDRESS_POINTER_REGNUM
      if (regno == RETURN_ADDRESS_POINTER_REGNUM)
        return return_address_pointer_rtx;
#endif
      if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
          && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
        return pic_offset_table_rtx;
      if (regno == STACK_POINTER_REGNUM)
        return stack_pointer_rtx;
    }

#if 0
  /* If the per-function register table has been set up, try to re-use
     an existing entry in that table to avoid useless generation of RTL.

     This code is disabled for now until we can fix the various backends
     which depend on having non-shared hard registers in some cases.  Long
     term we want to re-enable this code as it can significantly cut down
     on the amount of useless RTL that gets generated.

     We'll also need to fix some code that runs after reload that wants to
     set ORIGINAL_REGNO.  */

  if (cfun
      && cfun->emit
      && regno_reg_rtx
      && regno < FIRST_PSEUDO_REGISTER
      && reg_raw_mode[regno] == mode)
    return regno_reg_rtx[regno];
#endif

  return gen_raw_REG (mode, regno);
}
rtx
gen_rtx_MEM (enum machine_mode mode, rtx addr)
{
  rtx rt = gen_rtx_raw_MEM (mode, addr);

  /* This field is not cleared by the mere allocation of the rtx, so
     we clear it here.  */
  MEM_ATTRS (rt) = 0;

  return rt;
}

/* Generate a memory referring to non-trapping constant memory.  */

rtx
gen_const_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_READONLY_P (mem) = 1;
  MEM_NOTRAP_P (mem) = 1;
  return mem;
}

rtx
gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset)
{
  /* This is the most common failure type.
     Catch it early so we can see who does it.  */
  if ((offset % GET_MODE_SIZE (mode)) != 0)
    abort ();

  /* This check isn't usable right now because combine will
     throw arbitrary crap like a CALL into a SUBREG in
     gen_lowpart_for_combine so we must just eat it.  */
#if 0
  /* Check for this too.  */
  if (offset >= GET_MODE_SIZE (GET_MODE (reg)))
    abort ();
#endif
  return gen_rtx_raw_SUBREG (mode, reg, offset);
}

/* Generate a SUBREG representing the least-significant part of REG if MODE
   is smaller than the mode of REG, otherwise a paradoxical SUBREG.  */

rtx
gen_lowpart_SUBREG (enum machine_mode mode, rtx reg)
{
  enum machine_mode inmode;

  inmode = GET_MODE (reg);
  if (inmode == VOIDmode)
    inmode = mode;
  return gen_rtx_SUBREG (mode, reg,
                         subreg_lowpart_offset (mode, inmode));
}
/* gen_rtvec (n, [rt1, ..., rtn])
**
**          This routine creates an rtvec and stores within it the
**      pointers to rtx's which are its arguments.
*/

/*VARARGS1*/
rtvec
gen_rtvec (int n, ...)
{
  int i, save_n;
  rtx *vector;
  va_list p;

  va_start (p, n);

  if (n == 0)
    return NULL_RTVEC;          /* Don't allocate an empty rtvec...  */

  vector = alloca (n * sizeof (rtx));

  for (i = 0; i < n; i++)
    vector[i] = va_arg (p, rtx);

  /* The definition of VA_* in K&R C causes `n' to go out of scope.  */
  save_n = n;
  va_end (p);

  return gen_rtvec_v (save_n, vector);
}

rtvec
gen_rtvec_v (int n, rtx *argp)
{
  int i;
  rtvec rt_val;

  if (n == 0)
    return NULL_RTVEC;          /* Don't allocate an empty rtvec...  */

  rt_val = rtvec_alloc (n);     /* Allocate an rtvec...  */

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}
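
/* Illustrative sketch, not part of the original file: gen_rtvec is the
   varargs front end used when the elements are at hand, gen_rtvec_v
   takes an existing array.  Building the vector of a two-element
   PARALLEL, for instance, might look like this.  */
#if 0
static rtx
example_build_parallel (rtx set1, rtx set2)
{
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set1, set2));
}
#endif
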
/* Generate a REG rtx for a new pseudo register of mode MODE.
   This pseudo is assigned the next sequential register number.  */

rtx
gen_reg_rtx (enum machine_mode mode)
{
  struct function *f = cfun;
  rtx val;

  /* Don't let anything called after initial flow analysis create new
     registers.  */
  if (no_new_pseudos)
    abort ();

  if (generating_concat_p
      && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
          || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
    {
      /* For complex modes, don't make a single pseudo.
         Instead, make a CONCAT of two pseudos.
         This allows noncontiguous allocation of the real and imaginary parts,
         which makes much better code.  Besides, allocating DCmode
         pseudos overstrains reload on some machines like the 386.  */
      rtx realpart, imagpart;
      enum machine_mode partmode = GET_MODE_INNER (mode);

      realpart = gen_reg_rtx (partmode);
      imagpart = gen_reg_rtx (partmode);
      return gen_rtx_CONCAT (mode, realpart, imagpart);
    }

  /* Make sure regno_pointer_align, and regno_reg_rtx are large
     enough to have an element for this pseudo reg number.  */

  if (reg_rtx_no == f->emit->regno_pointer_align_length)
    {
      int old_size = f->emit->regno_pointer_align_length;
      char *new;
      rtx *new1;

      new = ggc_realloc (f->emit->regno_pointer_align, old_size * 2);
      memset (new + old_size, 0, old_size);
      f->emit->regno_pointer_align = (unsigned char *) new;

      new1 = ggc_realloc (f->emit->x_regno_reg_rtx,
                          old_size * 2 * sizeof (rtx));
      memset (new1 + old_size, 0, old_size * sizeof (rtx));
      regno_reg_rtx = new1;

      f->emit->regno_pointer_align_length = old_size * 2;
    }

  val = gen_raw_REG (mode, reg_rtx_no);
  regno_reg_rtx[reg_rtx_no++] = val;
  return val;
}
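
/* Illustrative sketch, not part of the original file: during expansion
   (while generating_concat_p is nonzero and no_new_pseudos is zero),
   asking for a complex-mode pseudo yields a CONCAT of two independent
   pseudos for the real and imaginary parts, not a single register.
   Assumes the target has DCmode (complex double).  */
#if 0
static void
example_complex_pseudo (void)
{
  rtx c = gen_reg_rtx (DCmode);

  if (GET_CODE (c) != CONCAT
      || !REG_P (XEXP (c, 0)) || !REG_P (XEXP (c, 1)))
    abort ();
}
#endif
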
/* Generate a register with the same attributes as REG, but offset by
   OFFSET bytes.  Do the big endian correction if needed.  */

rtx
gen_rtx_REG_offset (rtx reg, enum machine_mode mode, unsigned int regno, int offset)
{
  rtx new = gen_rtx_REG (mode, regno);
  tree decl;
  HOST_WIDE_INT var_size;

  /* PR middle-end/14084
     The problem appears when a variable is stored in a larger register
     and later it is used in the original mode or some mode in between
     or some part of variable is accessed.

     On little endian machines there is no problem because
     the REG_OFFSET of the start of the variable is the same when
     accessed in any mode (it is 0).

     However, this is not true on big endian machines.
     The offset of the start of the variable is different when accessed
     in different modes.
     When we are taking a part of the REG we have to change the OFFSET
     from offset WRT size of mode of REG to offset WRT size of variable.

     If we would not do the big endian correction the resulting REG_OFFSET
     would be larger than the size of the DECL.

     Examples of correction, for BYTES_BIG_ENDIAN WORDS_BIG_ENDIAN machine:

     REG.mode  MODE  DECL size  old offset  new offset  description
     DI        SI    4          4           0           int32 in SImode
     DI        SI    1          4           0           char in SImode
     DI        QI    1          7           0           char in QImode
     DI        QI    4          5           1           1st element in QImode
                                                        of char[4]
     DI        HI    4          6           2           1st element in HImode
                                                        of int16[2]

     If the size of DECL is equal or greater than the size of REG
     we can't do this correction because the register holds the
     whole variable or a part of the variable and thus the REG_OFFSET
     is already correct.  */

  decl = REG_EXPR (reg);
  if ((BYTES_BIG_ENDIAN || WORDS_BIG_ENDIAN)
      && decl != NULL
      && offset > 0
      && GET_MODE_SIZE (GET_MODE (reg)) > GET_MODE_SIZE (mode)
      && ((var_size = int_size_in_bytes (TREE_TYPE (decl))) > 0
          && var_size < GET_MODE_SIZE (GET_MODE (reg))))
    {
      int offset_le;

      /* Convert machine endian to little endian WRT size of mode of REG.  */
      if (WORDS_BIG_ENDIAN)
        offset_le = ((GET_MODE_SIZE (GET_MODE (reg)) - 1 - offset)
                     / UNITS_PER_WORD) * UNITS_PER_WORD;
      else
        offset_le = (offset / UNITS_PER_WORD) * UNITS_PER_WORD;

      if (BYTES_BIG_ENDIAN)
        offset_le += ((GET_MODE_SIZE (GET_MODE (reg)) - 1 - offset)
                      % UNITS_PER_WORD);
      else
        offset_le += offset % UNITS_PER_WORD;

      if (offset_le >= var_size)
        {
          /* MODE is wider than the variable so the new reg will cover
             the whole variable so the resulting OFFSET should be 0.  */
          offset = 0;
        }
      else
        {
          /* Convert little endian to machine endian WRT size of variable.  */
          if (WORDS_BIG_ENDIAN)
            offset = ((var_size - 1 - offset_le)
                      / UNITS_PER_WORD) * UNITS_PER_WORD;
          else
            offset = (offset_le / UNITS_PER_WORD) * UNITS_PER_WORD;

          if (BYTES_BIG_ENDIAN)
            offset += ((var_size - 1 - offset_le)
                       % UNITS_PER_WORD);
          else
            offset += offset_le % UNITS_PER_WORD;
        }
    }

  REG_ATTRS (new) = get_reg_attrs (REG_EXPR (reg),
                                   REG_OFFSET (reg) + offset);
  return new;
}
/* Set the register attributes of REG from the memory reference MEM.  */

void
set_reg_attrs_from_mem (rtx reg, rtx mem)
{
  if (MEM_OFFSET (mem) && GET_CODE (MEM_OFFSET (mem)) == CONST_INT)
    REG_ATTRS (reg)
      = get_reg_attrs (MEM_EXPR (mem), INTVAL (MEM_OFFSET (mem)));
}

/* Set the register attributes for registers contained in PARM_RTX.
   Use needed values from memory attributes of MEM.  */

void
set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
{
  if (REG_P (parm_rtx))
    set_reg_attrs_from_mem (parm_rtx, mem);
  else if (GET_CODE (parm_rtx) == PARALLEL)
    {
      /* Check for a NULL entry in the first slot, used to indicate that the
         parameter goes both on the stack and in registers.  */
      int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
      for (; i < XVECLEN (parm_rtx, 0); i++)
        {
          rtx x = XVECEXP (parm_rtx, 0, i);
          if (REG_P (XEXP (x, 0)))
            REG_ATTRS (XEXP (x, 0))
              = get_reg_attrs (MEM_EXPR (mem),
                               INTVAL (XEXP (x, 1)));
        }
    }
}
/* Assign the RTX X to declaration T.  */
void
set_decl_rtl (tree t, rtx x)
{
  DECL_CHECK (t)->decl.rtl = x;

  if (!x)
    return;
  /* For registers, we maintain the reverse information too.  */
  if (REG_P (x))
    REG_ATTRS (x) = get_reg_attrs (t, 0);
  else if (GET_CODE (x) == SUBREG)
    REG_ATTRS (SUBREG_REG (x))
      = get_reg_attrs (t, -SUBREG_BYTE (x));
  if (GET_CODE (x) == CONCAT)
    {
      if (REG_P (XEXP (x, 0)))
        REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
      if (REG_P (XEXP (x, 1)))
        REG_ATTRS (XEXP (x, 1))
          = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int i;
      for (i = 0; i < XVECLEN (x, 0); i++)
        {
          rtx y = XVECEXP (x, 0, i);
          if (REG_P (XEXP (y, 0)))
            REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
        }
    }
}

/* Assign the RTX X to parameter declaration T.  */
void
set_decl_incoming_rtl (tree t, rtx x)
{
  DECL_INCOMING_RTL (t) = x;

  if (!x)
    return;
  /* For registers, we maintain the reverse information too.  */
  if (REG_P (x))
    REG_ATTRS (x) = get_reg_attrs (t, 0);
  else if (GET_CODE (x) == SUBREG)
    REG_ATTRS (SUBREG_REG (x))
      = get_reg_attrs (t, -SUBREG_BYTE (x));
  if (GET_CODE (x) == CONCAT)
    {
      if (REG_P (XEXP (x, 0)))
        REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
      if (REG_P (XEXP (x, 1)))
        REG_ATTRS (XEXP (x, 1))
          = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int i, start;

      /* Check for a NULL entry, used to indicate that the parameter goes
         both on the stack and in registers.  */
      if (XEXP (XVECEXP (x, 0, 0), 0))
        start = 0;
      else
        start = 1;

      for (i = start; i < XVECLEN (x, 0); i++)
        {
          rtx y = XVECEXP (x, 0, i);
          if (REG_P (XEXP (y, 0)))
            REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
        }
    }
}

/* Identify REG (which may be a CONCAT) as a user register.  */

void
mark_user_reg (rtx reg)
{
  if (GET_CODE (reg) == CONCAT)
    {
      REG_USERVAR_P (XEXP (reg, 0)) = 1;
      REG_USERVAR_P (XEXP (reg, 1)) = 1;
    }
  else if (REG_P (reg))
    REG_USERVAR_P (reg) = 1;
  else
    abort ();
}
/* Identify REG as a probable pointer register and show its alignment
   as ALIGN, if nonzero.  */

void
mark_reg_pointer (rtx reg, int align)
{
  if (! REG_POINTER (reg))
    {
      REG_POINTER (reg) = 1;

      if (align)
        REGNO_POINTER_ALIGN (REGNO (reg)) = align;
    }
  else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
    /* We can no longer be sure just how aligned this pointer is.  */
    REGNO_POINTER_ALIGN (REGNO (reg)) = align;
}

/* Return 1 plus largest pseudo reg number used in the current function.  */

int
max_reg_num (void)
{
  return reg_rtx_no;
}

/* Return 1 + the largest label number used so far in the current function.  */

int
max_label_num (void)
{
  if (last_label_num && label_num == base_label_num)
    return last_label_num;
  return label_num;
}

/* Return first label number used in this function (if any were used).  */

int
get_first_label_num (void)
{
  return first_label_num;
}
/* If the rtx for label was created during the expansion of a nested
   function, then first_label_num won't include this label number.
   Fix this now so that array indices work later.  */

void
maybe_set_first_label_num (rtx x)
{
  if (CODE_LABEL_NUMBER (x) < first_label_num)
    first_label_num = CODE_LABEL_NUMBER (x);
}

/* Return the final regno of X, which is a SUBREG of a hard
   register.  */
unsigned int
subreg_hard_regno (rtx x, int check_mode)
{
  enum machine_mode mode = GET_MODE (x);
  unsigned int byte_offset, base_regno, final_regno;
  rtx reg = SUBREG_REG (x);

  /* This is where we attempt to catch illegal subregs
     created by the compiler.  */
  if (GET_CODE (x) != SUBREG
      || !REG_P (reg))
    abort ();
  base_regno = REGNO (reg);
  if (base_regno >= FIRST_PSEUDO_REGISTER)
    abort ();
  if (check_mode && ! HARD_REGNO_MODE_OK (base_regno, GET_MODE (reg)))
    abort ();
#ifdef ENABLE_CHECKING
  if (!subreg_offset_representable_p (REGNO (reg), GET_MODE (reg),
                                      SUBREG_BYTE (x), mode))
    abort ();
#endif
  /* Catch non-congruent offsets too.  */
  byte_offset = SUBREG_BYTE (x);
  if ((byte_offset % GET_MODE_SIZE (mode)) != 0)
    abort ();

  final_regno = subreg_regno (x);

  return final_regno;
}
/* Return a value representing some low-order bits of X, where the number
   of low-order bits is given by MODE.  Note that no conversion is done
   between floating-point and fixed-point values, rather, the bit
   representation is returned.

   This function handles the cases in common between gen_lowpart, below,
   and two variants in cse.c and combine.c.  These are the cases that can
   be safely handled at all points in the compilation.

   If this is not a case we can handle, return 0.  */

rtx
gen_lowpart_common (enum machine_mode mode, rtx x)
{
  int msize = GET_MODE_SIZE (mode);
  int xsize;
  int offset = 0;
  enum machine_mode innermode;

  /* Unfortunately, this routine doesn't take a parameter for the mode of X,
     so we have to make one up.  Yuk.  */
  innermode = GET_MODE (x);
  if (GET_CODE (x) == CONST_INT && msize <= HOST_BITS_PER_WIDE_INT)
    innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
  else if (innermode == VOIDmode)
    innermode = mode_for_size (HOST_BITS_PER_WIDE_INT * 2, MODE_INT, 0);

  xsize = GET_MODE_SIZE (innermode);

  if (innermode == VOIDmode || innermode == BLKmode)
    abort ();

  if (innermode == mode)
    return x;

  /* MODE must occupy no more words than the mode of X.  */
  if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
      > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
    return 0;

  /* Don't allow generating paradoxical FLOAT_MODE subregs.  */
  if (GET_MODE_CLASS (mode) == MODE_FLOAT && msize > xsize)
    return 0;

  offset = subreg_lowpart_offset (mode, innermode);

  if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
      && (GET_MODE_CLASS (mode) == MODE_INT
          || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
    {
      /* If we are getting the low-order part of something that has been
         sign- or zero-extended, we can either just use the object being
         extended or make a narrower extension.  If we want an even smaller
         piece than the size of the object being extended, call ourselves
         recursively.

         This case is used mostly by combine and cse.  */

      if (GET_MODE (XEXP (x, 0)) == mode)
        return XEXP (x, 0);
      else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
        return gen_lowpart_common (mode, XEXP (x, 0));
      else if (msize < xsize)
        return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
    }
  else if (GET_CODE (x) == SUBREG || REG_P (x)
           || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
           || GET_CODE (x) == CONST_DOUBLE || GET_CODE (x) == CONST_INT)
    return simplify_gen_subreg (mode, x, innermode, offset);

  /* Otherwise, we can't do this.  */
  return 0;
}
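
/* Illustrative sketch, not part of the original file: taking the low
   part of a sign- or zero-extension peels the extension off when the
   requested mode matches the extended operand.  Assumes si_reg is an
   SImode REG.  */
#if 0
static void
example_lowpart_of_extension (rtx si_reg)
{
  rtx ext = gen_rtx_SIGN_EXTEND (DImode, si_reg);       /* SI -> DI.  */

  if (gen_lowpart_common (SImode, ext) != si_reg)
    abort ();
}
#endif
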
/* Return the constant real or imaginary part (which has mode MODE)
   of a complex value X.  The IMAGPART_P argument determines whether
   the real or imaginary component should be returned.  This function
   returns NULL_RTX if the component isn't a constant.  */

static rtx
gen_complex_constant_part (enum machine_mode mode, rtx x, int imagpart_p)
{
  tree decl, part;

  if (MEM_P (x)
      && GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
    {
      decl = SYMBOL_REF_DECL (XEXP (x, 0));
      if (decl != NULL_TREE && TREE_CODE (decl) == COMPLEX_CST)
        {
          part = imagpart_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
          if (TREE_CODE (part) == REAL_CST
              || TREE_CODE (part) == INTEGER_CST)
            return expand_expr (part, NULL_RTX, mode, 0);
        }
    }
  return NULL_RTX;
}

/* Return the real part (which has mode MODE) of a complex value X.
   This always comes at the low address in memory.  */

rtx
gen_realpart (enum machine_mode mode, rtx x)
{
  rtx part;

  /* Handle complex constants.  */
  part = gen_complex_constant_part (mode, x, 0);
  if (part != NULL_RTX)
    return part;

  if (WORDS_BIG_ENDIAN
      && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
      && REG_P (x)
      && REGNO (x) < FIRST_PSEUDO_REGISTER)
    internal_error
      ("can't access real part of complex value in hard register");
  else if (WORDS_BIG_ENDIAN)
    return gen_highpart (mode, x);
  else
    return gen_lowpart (mode, x);
}

/* Return the imaginary part (which has mode MODE) of a complex value X.
   This always comes at the high address in memory.  */

rtx
gen_imagpart (enum machine_mode mode, rtx x)
{
  rtx part;

  /* Handle complex constants.  */
  part = gen_complex_constant_part (mode, x, 1);
  if (part != NULL_RTX)
    return part;

  if (WORDS_BIG_ENDIAN)
    return gen_lowpart (mode, x);
  else if (! WORDS_BIG_ENDIAN
           && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
           && REG_P (x)
           && REGNO (x) < FIRST_PSEUDO_REGISTER)
    internal_error
      ("can't access imaginary part of complex value in hard register");
  else
    return gen_highpart (mode, x);
}

rtx
gen_highpart (enum machine_mode mode, rtx x)
{
  unsigned int msize = GET_MODE_SIZE (mode);
  rtx result;

  /* This case loses if X is a subreg.  To catch bugs early,
     complain if an invalid MODE is used even in other cases.  */
  if (msize > UNITS_PER_WORD
      && msize != (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)))
    abort ();

  result = simplify_gen_subreg (mode, x, GET_MODE (x),
                                subreg_highpart_offset (mode, GET_MODE (x)));

  /* simplify_gen_subreg is not guaranteed to return a valid operand for
     the target if we have a MEM.  gen_highpart must return a valid operand,
     emitting code if necessary to do so.  */
  if (result != NULL_RTX && MEM_P (result))
    result = validize_mem (result);

  if (!result)
    abort ();
  return result;
}

/* Like gen_highpart, but accept mode of EXP operand in case EXP can
   be a VOIDmode constant.  */
rtx
gen_highpart_mode (enum machine_mode outermode, enum machine_mode innermode, rtx exp)
{
  if (GET_MODE (exp) != VOIDmode)
    {
      if (GET_MODE (exp) != innermode)
        abort ();
      return gen_highpart (outermode, exp);
    }
  return simplify_gen_subreg (outermode, exp, innermode,
                              subreg_highpart_offset (outermode, innermode));
}
/* Return offset in bytes to get OUTERMODE low part
   of the value in mode INNERMODE stored in memory in target format.  */

unsigned int
subreg_lowpart_offset (enum machine_mode outermode, enum machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (WORDS_BIG_ENDIAN)
        offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
        offset += difference % UNITS_PER_WORD;
    }

  return offset;
}

/* Return offset in bytes to get OUTERMODE high part
   of the value in mode INNERMODE stored in memory in target format.  */
unsigned int
subreg_highpart_offset (enum machine_mode outermode, enum machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  if (GET_MODE_SIZE (innermode) < GET_MODE_SIZE (outermode))
    abort ();

  if (difference > 0)
    {
      if (! WORDS_BIG_ENDIAN)
        offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (! BYTES_BIG_ENDIAN)
        offset += difference % UNITS_PER_WORD;
    }

  return offset;
}
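
/* Illustrative sketch, not part of the original file: for the SImode
   low part of a DImode value, the byte offset is 0 on a little-endian
   target and 4 on a big-endian one, and on a uniformly little- or
   big-endian target the low-part and high-part offsets together tile
   the inner value.  */
#if 0
static void
example_subreg_offsets (void)
{
  unsigned int lo = subreg_lowpart_offset (SImode, DImode);
  unsigned int hi = subreg_highpart_offset (SImode, DImode);

  if (lo + hi != GET_MODE_SIZE (DImode) - GET_MODE_SIZE (SImode))
    abort ();
}
#endif
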
/* Return 1 iff X, assumed to be a SUBREG,
   refers to the least significant part of its containing reg.
   If X is not a SUBREG, always return 1 (it is its own low part!).  */

int
subreg_lowpart_p (rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return 1;
  else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
    return 0;

  return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
          == SUBREG_BYTE (x));
}

/* Return subword OFFSET of operand OP.
   The word number, OFFSET, is interpreted as the word number starting
   at the low-order address.  OFFSET 0 is the low-order word if not
   WORDS_BIG_ENDIAN, otherwise it is the high-order word.

   If we cannot extract the required word, we return zero.  Otherwise,
   an rtx corresponding to the requested word will be returned.

   VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
   reload has completed, a valid address will always be returned.  After
   reload, if a valid address cannot be returned, we return zero.

   If VALIDATE_ADDRESS is zero, we simply form the required address; validating
   it is the responsibility of the caller.

   MODE is the mode of OP in case it is a CONST_INT.

   ??? This is still rather broken for some cases.  The problem for the
   moment is that all callers of this thing provide no 'goal mode' to
   tell us to work with.  This exists because all callers were written
   in a word based SUBREG world.
   Now use of this function can be deprecated by simplify_subreg in most
   cases.  */

rtx
operand_subword (rtx op, unsigned int offset, int validate_address, enum machine_mode mode)
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  if (mode == VOIDmode)
    abort ();

  /* If OP is narrower than a word, fail.  */
  if (mode != BLKmode
      && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
    return 0;

  /* If we want a word outside OP, return zero.  */
  if (mode != BLKmode
      && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
    return const0_rtx;

  /* Form a new MEM at the requested address.  */
  if (MEM_P (op))
    {
      rtx new = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);

      if (! validate_address)
        return new;

      else if (reload_completed)
        {
          if (! strict_memory_address_p (word_mode, XEXP (new, 0)))
            return 0;
        }
      else
        return replace_equiv_address (new, XEXP (new, 0));
    }

  /* Rest can be handled by simplify_subreg.  */
  return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
}
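
/* Illustrative sketch, not part of the original file: splitting a
   DImode operand into its two word-sized halves, e.g. when emitting a
   multi-word move on a target with 32-bit words.  Word 0 is the
   low-order word unless WORDS_BIG_ENDIAN.  */
#if 0
static void
example_split_into_words (rtx di_op, rtx *word0, rtx *word1)
{
  *word0 = operand_subword (di_op, 0, 1, DImode);
  *word1 = operand_subword (di_op, 1, 1, DImode);

  if (*word0 == 0 || *word1 == 0)
    abort ();           /* Callers that must not fail would use
                           operand_subword_force instead.  */
}
#endif
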
/* Similar to `operand_subword', but never return 0.  If we can't extract
   the required subword, put OP into a register and try again.  If that fails,
   abort.  We always validate the address in this case.

   MODE is the mode of OP, in case it is CONST_INT.  */

rtx
operand_subword_force (rtx op, unsigned int offset, enum machine_mode mode)
{
  rtx result = operand_subword (op, offset, 1, mode);

  if (result)
    return result;

  if (mode != BLKmode && mode != VOIDmode)
    {
      /* If this is a register which can not be accessed by words, copy it
         to a pseudo register.  */
      if (REG_P (op))
        op = copy_to_reg (op);
      else
        op = force_reg (mode, op);
    }

  result = operand_subword (op, offset, 1, mode);
  if (result == 0)
    abort ();

  return result;
}

/* Given a compare instruction, swap the operands.
   A test instruction is changed into a compare of 0 against the operand.  */

void
reverse_comparison (rtx insn)
{
  rtx body = PATTERN (insn);
  rtx comp;

  if (GET_CODE (body) == SET)
    comp = SET_SRC (body);
  else
    comp = SET_SRC (XVECEXP (body, 0, 0));

  if (GET_CODE (comp) == COMPARE)
    {
      rtx op0 = XEXP (comp, 0);
      rtx op1 = XEXP (comp, 1);
      XEXP (comp, 0) = op1;
      XEXP (comp, 1) = op0;
    }
  else
    {
      rtx new = gen_rtx_COMPARE (VOIDmode,
                                 CONST0_RTX (GET_MODE (comp)), comp);
      if (GET_CODE (body) == SET)
        SET_SRC (body) = new;
      else
        SET_SRC (XVECEXP (body, 0, 0)) = new;
    }
}
/* Within a MEM_EXPR, we care about either (1) a component ref of a decl,
   or (2) a component ref of something variable.  Represent the latter with
   a NULL expression.  */

static tree
component_ref_for_mem_expr (tree ref)
{
  tree inner = TREE_OPERAND (ref, 0);

  if (TREE_CODE (inner) == COMPONENT_REF)
    inner = component_ref_for_mem_expr (inner);
  else
    {
      /* Now remove any conversions: they don't change what the underlying
         object is.  Likewise for SAVE_EXPR.  */
      while (TREE_CODE (inner) == NOP_EXPR || TREE_CODE (inner) == CONVERT_EXPR
             || TREE_CODE (inner) == NON_LVALUE_EXPR
             || TREE_CODE (inner) == VIEW_CONVERT_EXPR
             || TREE_CODE (inner) == SAVE_EXPR)
        inner = TREE_OPERAND (inner, 0);

      if (! DECL_P (inner))
        inner = NULL_TREE;
    }

  if (inner == TREE_OPERAND (ref, 0))
    return ref;
  else
    return build3 (COMPONENT_REF, TREE_TYPE (ref), inner,
                   TREE_OPERAND (ref, 1), NULL_TREE);
}

/* Returns 1 if the two MEM_EXPRs can be considered equal
   and 0 otherwise.  */

int
mem_expr_equal_p (tree expr1, tree expr2)
{
  if (expr1 == expr2)
    return 1;

  if (! expr1 || ! expr2)
    return 0;

  if (TREE_CODE (expr1) != TREE_CODE (expr2))
    return 0;

  if (TREE_CODE (expr1) == COMPONENT_REF)
    return
      mem_expr_equal_p (TREE_OPERAND (expr1, 0),
                        TREE_OPERAND (expr2, 0))
      && mem_expr_equal_p (TREE_OPERAND (expr1, 1), /* field decl */
                           TREE_OPERAND (expr2, 1));

  if (TREE_CODE (expr1) == INDIRECT_REF)
    return mem_expr_equal_p (TREE_OPERAND (expr1, 0),
                             TREE_OPERAND (expr2, 0));

  /* Decls with different pointers can't be equal.  */
  if (DECL_P (expr1))
    return 0;

  abort ();     /* ARRAY_REFs, ARRAY_RANGE_REFs and BIT_FIELD_REFs should
                   already have been resolved here.  */
}
/* Given REF, a MEM, and T, either the type of X or the expression
   corresponding to REF, set the memory attributes.  OBJECTP is nonzero
   if we are making a new object of this type.  BITPOS is nonzero if
   there is an offset outstanding on T that will be applied later.  */

void
set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
                                 HOST_WIDE_INT bitpos)
{
  HOST_WIDE_INT alias = MEM_ALIAS_SET (ref);
  tree expr = MEM_EXPR (ref);
  rtx offset = MEM_OFFSET (ref);
  rtx size = MEM_SIZE (ref);
  unsigned int align = MEM_ALIGN (ref);
  HOST_WIDE_INT apply_bitpos = 0;
  tree type;

  /* It can happen that type_for_mode was given a mode for which there
     is no language-level type.  In which case it returns NULL, which
     we can see here.  */
  if (t == NULL_TREE)
    return;

  type = TYPE_P (t) ? t : TREE_TYPE (t);
  if (type == error_mark_node)
    return;

  /* If we have already set DECL_RTL = ref, get_alias_set will get the
     wrong answer, as it assumes that DECL_RTL already has the right alias
     info.  Callers should not set DECL_RTL until after the call to
     set_mem_attributes.  */
  if (DECL_P (t) && ref == DECL_RTL_IF_SET (t))
    abort ();

  /* Get the alias set from the expression or type (perhaps using a
     front-end routine) and use it.  */
  alias = get_alias_set (t);

  MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
  MEM_IN_STRUCT_P (ref) = AGGREGATE_TYPE_P (type);
  MEM_POINTER (ref) = POINTER_TYPE_P (type);
  MEM_NOTRAP_P (ref) = TREE_THIS_NOTRAP (t);

  /* If we are making an object of this type, or if this is a DECL, we know
     that it is a scalar if the type is not an aggregate.  */
  if ((objectp || DECL_P (t)) && ! AGGREGATE_TYPE_P (type))
    MEM_SCALAR_P (ref) = 1;

  /* We can set the alignment from the type if we are making an object,
     this is an INDIRECT_REF, or if TYPE_ALIGN_OK.  */
  if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
    align = MAX (align, TYPE_ALIGN (type));

  /* If the size is known, we can set that.  */
  if (TYPE_SIZE_UNIT (type) && host_integerp (TYPE_SIZE_UNIT (type), 1))
    size = GEN_INT (tree_low_cst (TYPE_SIZE_UNIT (type), 1));

  /* If T is not a type, we may be able to deduce some more information about
     the expression.  */
  if (! TYPE_P (t))
    {
      tree base = get_base_address (t);
      if (base && DECL_P (base)
          && TREE_READONLY (base)
          && (TREE_STATIC (base) || DECL_EXTERNAL (base)))
        MEM_READONLY_P (ref) = 1;

      if (TREE_THIS_VOLATILE (t))
        MEM_VOLATILE_P (ref) = 1;

      /* Now remove any conversions: they don't change what the underlying
         object is.  Likewise for SAVE_EXPR.  */
      while (TREE_CODE (t) == NOP_EXPR || TREE_CODE (t) == CONVERT_EXPR
             || TREE_CODE (t) == NON_LVALUE_EXPR
             || TREE_CODE (t) == VIEW_CONVERT_EXPR
             || TREE_CODE (t) == SAVE_EXPR)
        t = TREE_OPERAND (t, 0);

      /* If this expression can't be addressed (e.g., it contains a reference
         to a non-addressable field), show we don't change its alias set.  */
      if (! can_address_p (t))
        MEM_KEEP_ALIAS_SET_P (ref) = 1;

      /* If this is a decl, set the attributes of the MEM from it.  */
      if (DECL_P (t))
        {
          expr = t;
          offset = const0_rtx;
          apply_bitpos = bitpos;
          size = (DECL_SIZE_UNIT (t)
                  && host_integerp (DECL_SIZE_UNIT (t), 1)
                  ? GEN_INT (tree_low_cst (DECL_SIZE_UNIT (t), 1)) : 0);
          align = DECL_ALIGN (t);
        }

      /* If this is a constant, we know the alignment.  */
      else if (TREE_CODE_CLASS (TREE_CODE (t)) == 'c')
        {
          align = TYPE_ALIGN (type);
#ifdef CONSTANT_ALIGNMENT
          align = CONSTANT_ALIGNMENT (t, align);
#endif
        }

      /* If this is a field reference and not a bit-field, record it.  */
      /* ??? There is some information that can be gleaned from bit-fields,
         such as the word offset in the structure that might be modified.
         But skip it for now.  */
      else if (TREE_CODE (t) == COMPONENT_REF
               && ! DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
        {
          expr = component_ref_for_mem_expr (t);
          offset = const0_rtx;
          apply_bitpos = bitpos;
          /* ??? Any reason the field size would be different than
             the size we got from the type?  */
        }

      /* If this is an array reference, look for an outer field reference.  */
      else if (TREE_CODE (t) == ARRAY_REF)
        {
          tree off_tree = size_zero_node;
          /* We can't modify t, because we use it at the end of the
             function.  */
          tree t2 = t;

          do
            {
              tree index = TREE_OPERAND (t2, 1);
              tree low_bound = array_ref_low_bound (t2);
              tree unit_size = array_ref_element_size (t2);

              /* We assume all arrays have sizes that are a multiple of a byte.
                 First subtract the lower bound, if any, in the type of the
                 index, then convert to sizetype and multiply by the size of
                 the array element.  */
              if (! integer_zerop (low_bound))
                index = fold (build2 (MINUS_EXPR, TREE_TYPE (index),
                                      index, low_bound));

              off_tree = size_binop (PLUS_EXPR,
                                     size_binop (MULT_EXPR, convert (sizetype,
                                                                     index),
                                                 unit_size),
                                     off_tree);
              t2 = TREE_OPERAND (t2, 0);
            }
          while (TREE_CODE (t2) == ARRAY_REF);

          if (DECL_P (t2))
            {
              expr = t2;
              offset = NULL;
              if (host_integerp (off_tree, 1))
                {
                  HOST_WIDE_INT ioff = tree_low_cst (off_tree, 1);
                  HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
                  align = DECL_ALIGN (t2);
                  if (aoff && (unsigned HOST_WIDE_INT) aoff < align)
                    align = aoff;
                  offset = GEN_INT (ioff);
                  apply_bitpos = bitpos;
                }
            }
          else if (TREE_CODE (t2) == COMPONENT_REF)
            {
              expr = component_ref_for_mem_expr (t2);
              if (host_integerp (off_tree, 1))
                {
                  offset = GEN_INT (tree_low_cst (off_tree, 1));
                  apply_bitpos = bitpos;
                }
              /* ??? Any reason the field size would be different than
                 the size we got from the type?  */
            }
          else if (flag_argument_noalias > 1
                   && TREE_CODE (t2) == INDIRECT_REF
                   && TREE_CODE (TREE_OPERAND (t2, 0)) == PARM_DECL)
            {
              expr = t2;
              offset = NULL;
            }
        }

      /* If this is a Fortran indirect argument reference, record the
         parameter decl.  */
      else if (flag_argument_noalias > 1
               && TREE_CODE (t) == INDIRECT_REF
               && TREE_CODE (TREE_OPERAND (t, 0)) == PARM_DECL)
        {
          expr = t;
          offset = NULL;
        }
    }

  /* If we modified OFFSET based on T, then subtract the outstanding
     bit position offset.  Similarly, increase the size of the accessed
     object to contain the negative offset.  */
  if (apply_bitpos)
    {
      offset = plus_constant (offset, -(apply_bitpos / BITS_PER_UNIT));
      if (size)
        size = plus_constant (size, apply_bitpos / BITS_PER_UNIT);
    }

  /* Now set the attributes we computed above.  */
  MEM_ATTRS (ref)
    = get_mem_attrs (alias, expr, offset, size, align, GET_MODE (ref));

  /* If this is already known to be a scalar or aggregate, we are done.  */
  if (MEM_IN_STRUCT_P (ref) || MEM_SCALAR_P (ref))
    return;

  /* If it is a reference into an aggregate, this is part of an aggregate.
     Otherwise we don't know.  */
  else if (TREE_CODE (t) == COMPONENT_REF || TREE_CODE (t) == ARRAY_REF
           || TREE_CODE (t) == ARRAY_RANGE_REF
           || TREE_CODE (t) == BIT_FIELD_REF)
    MEM_IN_STRUCT_P (ref) = 1;
}
void
set_mem_attributes (rtx ref, tree t, int objectp)
{
  set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
}

/* Set the memory attributes of MEM from REG.  */

void
set_mem_attrs_from_reg (rtx mem, rtx reg)
{
  MEM_ATTRS (mem)
    = get_mem_attrs (MEM_ALIAS_SET (mem), REG_EXPR (reg),
                     GEN_INT (REG_OFFSET (reg)),
                     MEM_SIZE (mem), MEM_ALIGN (mem), GET_MODE (mem));
}
/* Set the alias set of MEM to SET.  */

void
set_mem_alias_set (rtx mem, HOST_WIDE_INT set)
{
#ifdef ENABLE_CHECKING
  /* If the new and old alias sets don't conflict, something is wrong.  */
  if (!alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)))
    abort ();
#endif

  MEM_ATTRS (mem) = get_mem_attrs (set, MEM_EXPR (mem), MEM_OFFSET (mem),
                                   MEM_SIZE (mem), MEM_ALIGN (mem),
                                   GET_MODE (mem));
}

/* Set the alignment of MEM to ALIGN bits.  */

void
set_mem_align (rtx mem, unsigned int align)
{
  MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
                                   MEM_OFFSET (mem), MEM_SIZE (mem), align,
                                   GET_MODE (mem));
}

/* Set the expr for MEM to EXPR.  */

void
set_mem_expr (rtx mem, tree expr)
{
  MEM_ATTRS (mem)
    = get_mem_attrs (MEM_ALIAS_SET (mem), expr, MEM_OFFSET (mem),
                     MEM_SIZE (mem), MEM_ALIGN (mem), GET_MODE (mem));
}

/* Set the offset of MEM to OFFSET.  */

void
set_mem_offset (rtx mem, rtx offset)
{
  MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
                                   offset, MEM_SIZE (mem), MEM_ALIGN (mem),
                                   GET_MODE (mem));
}

/* Set the size of MEM to SIZE.  */

void
set_mem_size (rtx mem, rtx size)
{
  MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
                                   MEM_OFFSET (mem), size, MEM_ALIGN (mem),
                                   GET_MODE (mem));
}
1806 /* Return a memory reference like MEMREF, but with its mode changed to MODE
1807 and its address changed to ADDR. (VOIDmode means don't change the mode.
1808 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
1809 returned memory location is required to be valid. The memory
1810 attributes are not changed. */
1812 static rtx
1813 change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate)
1815 rtx new;
1817 if (!MEM_P (memref))
1818 abort ();
1819 if (mode == VOIDmode)
1820 mode = GET_MODE (memref);
1821 if (addr == 0)
1822 addr = XEXP (memref, 0);
1823 if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
1824 && (!validate || memory_address_p (mode, addr)))
1825 return memref;
1827 if (validate)
1829 if (reload_in_progress || reload_completed)
1831 if (! memory_address_p (mode, addr))
1832 abort ();
1834 else
1835 addr = memory_address (mode, addr);
1838 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
1839 return memref;
1841 new = gen_rtx_MEM (mode, addr);
1842 MEM_COPY_ATTRIBUTES (new, memref);
1843 return new;
1846 /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
1847 way we are changing MEMREF, so we only preserve the alias set. */
1850 change_address (rtx memref, enum machine_mode mode, rtx addr)
1852 rtx new = change_address_1 (memref, mode, addr, 1), size;
1853 enum machine_mode mmode = GET_MODE (new);
1854 unsigned int align;
1856 size = mmode == BLKmode ? 0 : GEN_INT (GET_MODE_SIZE (mmode));
1857 align = mmode == BLKmode ? BITS_PER_UNIT : GET_MODE_ALIGNMENT (mmode);
1859 /* If there are no changes, just return the original memory reference. */
1860 if (new == memref)
1862 if (MEM_ATTRS (memref) == 0
1863 || (MEM_EXPR (memref) == NULL
1864 && MEM_OFFSET (memref) == NULL
1865 && MEM_SIZE (memref) == size
1866 && MEM_ALIGN (memref) == align))
1867 return new;
1869 new = gen_rtx_MEM (mmode, XEXP (memref, 0));
1870 MEM_COPY_ATTRIBUTES (new, memref);
1873 MEM_ATTRS (new)
1874 = get_mem_attrs (MEM_ALIAS_SET (memref), 0, 0, size, align, mmode);
1876 return new;
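/* For example, an expander that reuses MEMREF with a wholly new
   address might write (illustrative sketch; NEW_ADDR is assumed to
   be a valid address rtx):

	rtx blk = change_address (memref, BLKmode, new_addr);

   Only the alias set survives into BLK; the expr, offset and size
   attributes are dropped because we cannot tell how the reference
   changed.  */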
1879 /* Return a memory reference like MEMREF, but with its mode changed
1880 to MODE and its address offset by OFFSET bytes. If VALIDATE is
1881 nonzero, the memory address is forced to be valid.
1882 If ADJUST is zero, OFFSET is only used to update MEM_ATTRS
1883 and caller is responsible for adjusting MEMREF base register. */
1886 adjust_address_1 (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset,
1887 int validate, int adjust)
1889 rtx addr = XEXP (memref, 0);
1890 rtx new;
1891 rtx memoffset = MEM_OFFSET (memref);
1892 rtx size = 0;
1893 unsigned int memalign = MEM_ALIGN (memref);
1895 /* If there are no changes, just return the original memory reference. */
1896 if (mode == GET_MODE (memref) && !offset
1897 && (!validate || memory_address_p (mode, addr)))
1898 return memref;
1900 /* ??? Prefer to create garbage instead of creating shared rtl.
1901 This may happen even if offset is nonzero -- consider
1902 (plus (plus reg reg) const_int) -- so do this always. */
1903 addr = copy_rtx (addr);
1905 if (adjust)
1907 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
1908 object, we can merge it into the LO_SUM. */
1909 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
1910 && offset >= 0
1911 && (unsigned HOST_WIDE_INT) offset
1912 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
1913 addr = gen_rtx_LO_SUM (Pmode, XEXP (addr, 0),
1914 plus_constant (XEXP (addr, 1), offset));
1915 else
1916 addr = plus_constant (addr, offset);
1919 new = change_address_1 (memref, mode, addr, validate);
1921 /* Compute the new values of the memory attributes due to this adjustment.
1922 We add the offsets and update the alignment. */
1923 if (memoffset)
1924 memoffset = GEN_INT (offset + INTVAL (memoffset));
1926 /* Compute the new alignment by taking the MIN of the alignment and the
1927 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
1928 is zero. */
1929 if (offset != 0)
1930 memalign
1931 = MIN (memalign,
1932 (unsigned HOST_WIDE_INT) (offset & -offset) * BITS_PER_UNIT);
1934 /* We can compute the size in a number of ways. */
1935 if (GET_MODE (new) != BLKmode)
1936 size = GEN_INT (GET_MODE_SIZE (GET_MODE (new)));
1937 else if (MEM_SIZE (memref))
1938 size = plus_constant (MEM_SIZE (memref), -offset);
1940 MEM_ATTRS (new) = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref),
1941 memoffset, size, memalign, GET_MODE (new));
1943 /* At some point, we should validate that this offset is within the object,
1944 if all the appropriate values are known. */
1945 return new;
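/* Callers normally reach this through the adjust_address and
   adjust_address_nv macros in expr.h, which fix VALIDATE and ADJUST.
   For instance, fetching the second word of a DImode MEM on a 32-bit
   target could be written as (sketch; assumes UNITS_PER_WORD == 4):

	rtx high = adjust_address (mem, SImode, 4);

   The offset, size and alignment attributes of the result are
   updated by the code above.  */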
1948 /* Return a memory reference like MEMREF, but with its mode changed
1949 to MODE and its address changed to ADDR, which is assumed to be
1950 MEMREF offset by OFFSET bytes. If VALIDATE is
1951 nonzero, the memory address is forced to be valid. */
1954 adjust_automodify_address_1 (rtx memref, enum machine_mode mode, rtx addr,
1955 HOST_WIDE_INT offset, int validate)
1957 memref = change_address_1 (memref, VOIDmode, addr, validate);
1958 return adjust_address_1 (memref, mode, offset, validate, 0);
1961 /* Return a memory reference like MEMREF, but whose address is changed by
1962 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
1963 known to be in OFFSET (possibly 1). */
1966 offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
1968 rtx new, addr = XEXP (memref, 0);
1970 new = simplify_gen_binary (PLUS, Pmode, addr, offset);
1972 /* At this point we don't know _why_ the address is invalid. It
1973 could have secondary memory references, multiplies or anything.
1975 However, if we did go and rearrange things, we can wind up not
1976 being able to recognize the magic around pic_offset_table_rtx.
1977 This stuff is fragile, and is yet another example of why it is
1978 bad to expose PIC machinery too early. */
1979 if (! memory_address_p (GET_MODE (memref), new)
1980 && GET_CODE (addr) == PLUS
1981 && XEXP (addr, 0) == pic_offset_table_rtx)
1983 addr = force_reg (GET_MODE (addr), addr);
1984 new = simplify_gen_binary (PLUS, Pmode, addr, offset);
1987 update_temp_slot_address (XEXP (memref, 0), new);
1988 new = change_address_1 (memref, VOIDmode, new, 1);
1990 /* If there are no changes, just return the original memory reference. */
1991 if (new == memref)
1992 return new;
1994 /* Update the alignment to reflect the offset. Reset the offset, which
1995 we don't know. */
1996 MEM_ATTRS (new)
1997 = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref), 0, 0,
1998 MIN (MEM_ALIGN (memref), pow2 * BITS_PER_UNIT),
1999 GET_MODE (new));
2000 return new;
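/* For instance, indexing into a table of 4-byte elements might look
   like (sketch; BYTE_OFF is an rtx known to be a multiple of 4):

	rtx elt = offset_address (table_mem, byte_off, 4);

   Passing 4 for POW2 lets the code above keep 32-bit alignment on
   the result even though the runtime offset is unknown.  */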
2003 /* Return a memory reference like MEMREF, but with its address changed to
2004 ADDR. The caller is asserting that the actual piece of memory pointed
2005 to is the same, just the form of the address is being changed, such as
2006 by putting something into a register. */
2009 replace_equiv_address (rtx memref, rtx addr)
2011 /* change_address_1 copies the memory attribute structure without change
2012 and that's exactly what we want here. */
2013 update_temp_slot_address (XEXP (memref, 0), addr);
2014 return change_address_1 (memref, VOIDmode, addr, 1);
2017 /* Likewise, but the reference is not required to be valid. */
2020 replace_equiv_address_nv (rtx memref, rtx addr)
2022 return change_address_1 (memref, VOIDmode, addr, 0);
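/* A common use of these two is to force a complex address into a
   register without losing the MEM's attributes (sketch):

	rtx reg = force_reg (Pmode, XEXP (mem, 0));
	mem = replace_equiv_address (mem, reg);

   The result refers to the same bytes as before, so every attribute
   is carried over unchanged; the _nv variant does the same without
   validating the new address.  */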
2025 /* Return a memory reference like MEMREF, but with its mode widened to
2026 MODE and offset by OFFSET. This would be used by targets that e.g.
2027 cannot issue QImode memory operations and have to use SImode memory
2028 operations plus masking logic. */
2031 widen_memory_access (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset)
2033 rtx new = adjust_address_1 (memref, mode, offset, 1, 1);
2034 tree expr = MEM_EXPR (new);
2035 rtx memoffset = MEM_OFFSET (new);
2036 unsigned int size = GET_MODE_SIZE (mode);
2038 /* If there are no changes, just return the original memory reference. */
2039 if (new == memref)
2040 return new;
2042 /* If we don't know what offset we were at within the expression, then
2043 we can't know if we've overstepped the bounds. */
2044 if (! memoffset)
2045 expr = NULL_TREE;
2047 while (expr)
2049 if (TREE_CODE (expr) == COMPONENT_REF)
2051 tree field = TREE_OPERAND (expr, 1);
2052 tree offset = component_ref_field_offset (expr);
2054 if (! DECL_SIZE_UNIT (field))
2056 expr = NULL_TREE;
2057 break;
2060 /* Is the field at least as large as the access? If so, we are done;
2061 otherwise strip back to the containing structure. */
2062 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2063 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
2064 && INTVAL (memoffset) >= 0)
2065 break;
2067 if (! host_integerp (offset, 1))
2069 expr = NULL_TREE;
2070 break;
2073 expr = TREE_OPERAND (expr, 0);
2074 memoffset
2075 = (GEN_INT (INTVAL (memoffset)
2076 + tree_low_cst (offset, 1)
2077 + (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
2078 / BITS_PER_UNIT)));
2080 /* Similarly for the decl. */
2081 else if (DECL_P (expr)
2082 && DECL_SIZE_UNIT (expr)
2083 && TREE_CODE (DECL_SIZE_UNIT (expr)) == INTEGER_CST
2084 && compare_tree_int (DECL_SIZE_UNIT (expr), size) >= 0
2085 && (! memoffset || INTVAL (memoffset) >= 0))
2086 break;
2087 else
2089 /* The widened memory access overflows the expression, which means
2090 that it could alias another expression. Zap it. */
2091 expr = NULL_TREE;
2092 break;
2096 if (! expr)
2097 memoffset = NULL_RTX;
2099 /* The widened memory may alias other stuff, so zap the alias set. */
2100 /* ??? Maybe use get_alias_set on any remaining expression. */
2102 MEM_ATTRS (new) = get_mem_attrs (0, expr, memoffset, GEN_INT (size),
2103 MEM_ALIGN (new), mode);
2105 return new;
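/* For example, a port without byte loads could expand a QImode read
   as (sketch; assumes the containing object is word-aligned so the
   wider access is safe):

	rtx wide = widen_memory_access (mem, SImode, 0);

   and then extract the byte by masking or shifting.  The alias set
   of the result is cleared above precisely because the extra bytes
   may belong to a different object.  */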
2108 /* Return a newly created CODE_LABEL rtx with a unique label number. */
2111 gen_label_rtx (void)
2113 return gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX, NULL_RTX,
2114 NULL, label_num++, NULL);
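/* A typical emit-time use (sketch; gen_jump is the target's
   machine-generated jump pattern):

	rtx label = gen_label_rtx ();
	emit_jump_insn (gen_jump (label));
	emit_barrier ();
	...
	emit_label (label);

   The label number is unique across the entire compilation, courtesy
   of label_num above.  */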
2117 /* For procedure integration. */
2119 /* Install new pointers to the first and last insns in the chain.
2120 Also, set cur_insn_uid to one higher than the last in use.
2121 Used for an inlined procedure after copying the insn chain. */
2123 void
2124 set_new_first_and_last_insn (rtx first, rtx last)
2126 rtx insn;
2128 first_insn = first;
2129 last_insn = last;
2130 cur_insn_uid = 0;
2132 for (insn = first; insn; insn = NEXT_INSN (insn))
2133 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2135 cur_insn_uid++;
2138 /* Set the last label number found in the current function.
2139 This is used when belatedly compiling an inline function. */
2141 void
2142 set_new_last_label_num (int last)
2144 base_label_num = label_num;
2145 last_label_num = last;
2148 /* Restore all variables describing the current status from the structure *P.
2149 This is used after a nested function. */
2151 void
2152 restore_emit_status (struct function *p ATTRIBUTE_UNUSED)
2154 last_label_num = 0;
2157 /* Go through all the RTL insn bodies and copy any invalid shared
2158 structure. This routine should only be called once. */
2160 static void
2161 unshare_all_rtl_1 (tree fndecl, rtx insn)
2163 tree decl;
2165 /* Make sure that virtual parameters are not shared. */
2166 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
2167 SET_DECL_RTL (decl, copy_rtx_if_shared (DECL_RTL (decl)));
2169 /* Make sure that virtual stack slots are not shared. */
2170 unshare_all_decls (DECL_INITIAL (fndecl));
2172 /* Unshare just about everything else. */
2173 unshare_all_rtl_in_chain (insn);
2175 /* Make sure the addresses of stack slots found outside the insn chain
2176 (such as, in DECL_RTL of a variable) are not shared
2177 with the insn chain.
2179 This special care is necessary when the stack slot MEM does not
2180 actually appear in the insn chain. If it does appear, its address
2181 is unshared from all else at that point. */
2182 stack_slot_list = copy_rtx_if_shared (stack_slot_list);
2185 /* Go through all the RTL insn bodies and copy any invalid shared
2186 structure, again. This is a fairly expensive thing to do so it
2187 should be done sparingly. */
2189 void
2190 unshare_all_rtl_again (rtx insn)
2192 rtx p;
2193 tree decl;
2195 for (p = insn; p; p = NEXT_INSN (p))
2196 if (INSN_P (p))
2198 reset_used_flags (PATTERN (p));
2199 reset_used_flags (REG_NOTES (p));
2200 reset_used_flags (LOG_LINKS (p));
2203 /* Make sure that virtual stack slots are not shared. */
2204 reset_used_decls (DECL_INITIAL (cfun->decl));
2206 /* Make sure that virtual parameters are not shared. */
2207 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = TREE_CHAIN (decl))
2208 reset_used_flags (DECL_RTL (decl));
2210 reset_used_flags (stack_slot_list);
2212 unshare_all_rtl_1 (cfun->decl, insn);
2215 void
2216 unshare_all_rtl (void)
2218 unshare_all_rtl_1 (current_function_decl, get_insns ());
2221 /* Check that ORIG is not marked when it should not be, and mark ORIG as in use.
2222 Recursively does the same for subexpressions. */
2224 static void
2225 verify_rtx_sharing (rtx orig, rtx insn)
2227 rtx x = orig;
2228 int i;
2229 enum rtx_code code;
2230 const char *format_ptr;
2232 if (x == 0)
2233 return;
2235 code = GET_CODE (x);
2237 /* These types may be freely shared. */
2239 switch (code)
2241 case REG:
2242 case CONST_INT:
2243 case CONST_DOUBLE:
2244 case CONST_VECTOR:
2245 case SYMBOL_REF:
2246 case LABEL_REF:
2247 case CODE_LABEL:
2248 case PC:
2249 case CC0:
2250 case SCRATCH:
2251 return;
2252 /* SCRATCH rtxes must be shared because they represent distinct values. */
2253 case CLOBBER:
2254 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2255 return;
2256 break;
2258 case CONST:
2259 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
2260 a LABEL_REF, it isn't sharable. */
2261 if (GET_CODE (XEXP (x, 0)) == PLUS
2262 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
2263 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
2264 return;
2265 break;
2267 case MEM:
2268 /* A MEM is allowed to be shared if its address is constant. */
2269 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2270 || reload_completed || reload_in_progress)
2271 return;
2273 break;
2275 default:
2276 break;
2279 /* This rtx may not be shared. If it has already been seen,
2280 replace it with a copy of itself. */
2282 if (RTX_FLAG (x, used))
2284 error ("Invalid rtl sharing found in the insn");
2285 debug_rtx (insn);
2286 error ("Shared rtx");
2287 debug_rtx (x);
2288 abort ();
2290 RTX_FLAG (x, used) = 1;
2292 /* Now scan the subexpressions recursively. */
2294 format_ptr = GET_RTX_FORMAT (code);
2296 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2298 switch (*format_ptr++)
2300 case 'e':
2301 verify_rtx_sharing (XEXP (x, i), insn);
2302 break;
2304 case 'E':
2305 if (XVEC (x, i) != NULL)
2307 int j;
2308 int len = XVECLEN (x, i);
2310 for (j = 0; j < len; j++)
2312 /* We allow sharing of ASM_OPERANDS inside a single instruction. */
2313 if (j && GET_CODE (XVECEXP (x, i, j)) == SET
2314 && GET_CODE (SET_SRC (XVECEXP (x, i, j))) == ASM_OPERANDS)
2315 verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2316 else
2317 verify_rtx_sharing (XVECEXP (x, i, j), insn);
2320 break;
2323 return;
2326 /* Go through all the RTL insn bodies and check that there is no unexpected
2327 sharing in between the subexpressions. */
2329 void
2330 verify_rtl_sharing (void)
2332 rtx p;
2334 for (p = get_insns (); p; p = NEXT_INSN (p))
2335 if (INSN_P (p))
2337 reset_used_flags (PATTERN (p));
2338 reset_used_flags (REG_NOTES (p));
2339 reset_used_flags (LOG_LINKS (p));
2342 for (p = get_insns (); p; p = NEXT_INSN (p))
2343 if (INSN_P (p))
2345 verify_rtx_sharing (PATTERN (p), p);
2346 verify_rtx_sharing (REG_NOTES (p), p);
2347 verify_rtx_sharing (LOG_LINKS (p), p);
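/* Passes that rewrite the insn stream can call this as a consistency
   check, typically guarded like other expensive RTL checks (sketch):

	#ifdef ENABLE_CHECKING
	  verify_rtl_sharing ();
	#endif

   Both loops above are needed: the first clears the used flags over
   the whole stream, the second marks them while checking.  */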
2351 /* Go through all the RTL insn bodies and copy any invalid shared structure.
2352 Assumes the mark bits are cleared at entry. */
2354 void
2355 unshare_all_rtl_in_chain (rtx insn)
2357 for (; insn; insn = NEXT_INSN (insn))
2358 if (INSN_P (insn))
2360 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2361 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2362 LOG_LINKS (insn) = copy_rtx_if_shared (LOG_LINKS (insn));
2366 /* Go through all virtual stack slots of a function and copy any
2367 shared structure. */
2368 static void
2369 unshare_all_decls (tree blk)
2371 tree t;
2373 /* Copy shared decls. */
2374 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
2375 if (DECL_RTL_SET_P (t))
2376 SET_DECL_RTL (t, copy_rtx_if_shared (DECL_RTL (t)));
2378 /* Now process sub-blocks. */
2379 for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
2380 unshare_all_decls (t);
2383 /* Go through all virtual stack slots of a function and mark them as
2384 not shared. */
2385 static void
2386 reset_used_decls (tree blk)
2388 tree t;
2390 /* Mark decls. */
2391 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
2392 if (DECL_RTL_SET_P (t))
2393 reset_used_flags (DECL_RTL (t));
2395 /* Now process sub-blocks. */
2396 for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
2397 reset_used_decls (t);
2400 /* Similar to `copy_rtx' except that if MAY_SHARE is present, it is
2401 placed in the result directly, rather than being copied. MAY_SHARE is
2402 either a MEM or an EXPR_LIST of MEMs. */
2405 copy_most_rtx (rtx orig, rtx may_share)
2407 rtx copy;
2408 int i, j;
2409 RTX_CODE code;
2410 const char *format_ptr;
2412 if (orig == may_share
2413 || (GET_CODE (may_share) == EXPR_LIST
2414 && in_expr_list_p (may_share, orig)))
2415 return orig;
2417 code = GET_CODE (orig);
2419 switch (code)
2421 case REG:
2422 case CONST_INT:
2423 case CONST_DOUBLE:
2424 case CONST_VECTOR:
2425 case SYMBOL_REF:
2426 case CODE_LABEL:
2427 case PC:
2428 case CC0:
2429 return orig;
2430 default:
2431 break;
2434 copy = rtx_alloc (code);
2435 PUT_MODE (copy, GET_MODE (orig));
2436 RTX_FLAG (copy, in_struct) = RTX_FLAG (orig, in_struct);
2437 RTX_FLAG (copy, volatil) = RTX_FLAG (orig, volatil);
2438 RTX_FLAG (copy, unchanging) = RTX_FLAG (orig, unchanging);
2439 RTX_FLAG (copy, frame_related) = RTX_FLAG (orig, frame_related);
2440 RTX_FLAG (copy, return_val) = RTX_FLAG (orig, return_val);
2442 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
2444 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
2446 switch (*format_ptr++)
2448 case 'e':
2449 XEXP (copy, i) = XEXP (orig, i);
2450 if (XEXP (orig, i) != NULL && XEXP (orig, i) != may_share)
2451 XEXP (copy, i) = copy_most_rtx (XEXP (orig, i), may_share);
2452 break;
2454 case 'u':
2455 XEXP (copy, i) = XEXP (orig, i);
2456 break;
2458 case 'E':
2459 case 'V':
2460 XVEC (copy, i) = XVEC (orig, i);
2461 if (XVEC (orig, i) != NULL)
2463 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
2464 for (j = 0; j < XVECLEN (copy, i); j++)
2465 XVECEXP (copy, i, j)
2466 = copy_most_rtx (XVECEXP (orig, i, j), may_share);
2468 break;
2470 case 'w':
2471 XWINT (copy, i) = XWINT (orig, i);
2472 break;
2474 case 'n':
2475 case 'i':
2476 XINT (copy, i) = XINT (orig, i);
2477 break;
2479 case 't':
2480 XTREE (copy, i) = XTREE (orig, i);
2481 break;
2483 case 's':
2484 case 'S':
2485 XSTR (copy, i) = XSTR (orig, i);
2486 break;
2488 case '0':
2489 X0ANY (copy, i) = X0ANY (orig, i);
2490 break;
2492 default:
2493 abort ();
2496 return copy;
2499 /* Mark ORIG as in use, and return a copy of it if it was already in use.
2500 Recursively does the same for subexpressions. Uses
2501 copy_rtx_if_shared_1 to reduce stack space. */
2504 copy_rtx_if_shared (rtx orig)
2506 copy_rtx_if_shared_1 (&orig);
2507 return orig;
2510 /* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
2511 use. Recursively does the same for subexpressions. */
2513 static void
2514 copy_rtx_if_shared_1 (rtx *orig1)
2516 rtx x;
2517 int i;
2518 enum rtx_code code;
2519 rtx *last_ptr;
2520 const char *format_ptr;
2521 int copied = 0;
2522 int length;
2524 /* Repeat is used to turn tail-recursion into iteration. */
2525 repeat:
2526 x = *orig1;
2528 if (x == 0)
2529 return;
2531 code = GET_CODE (x);
2533 /* These types may be freely shared. */
2535 switch (code)
2537 case REG:
2538 case CONST_INT:
2539 case CONST_DOUBLE:
2540 case CONST_VECTOR:
2541 case SYMBOL_REF:
2542 case LABEL_REF:
2543 case CODE_LABEL:
2544 case PC:
2545 case CC0:
2546 case SCRATCH:
2547 /* SCRATCH rtxes must be shared because they represent distinct values. */
2548 return;
2549 case CLOBBER:
2550 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2551 return;
2552 break;
2554 case CONST:
2555 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
2556 a LABEL_REF, it isn't sharable. */
2557 if (GET_CODE (XEXP (x, 0)) == PLUS
2558 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
2559 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
2560 return;
2561 break;
2563 case INSN:
2564 case JUMP_INSN:
2565 case CALL_INSN:
2566 case NOTE:
2567 case BARRIER:
2568 /* The chain of insns is not being copied. */
2569 return;
2571 default:
2572 break;
2575 /* This rtx may not be shared. If it has already been seen,
2576 replace it with a copy of itself. */
2578 if (RTX_FLAG (x, used))
2580 rtx copy;
2582 copy = rtx_alloc (code);
2583 memcpy (copy, x, RTX_SIZE (code));
2584 x = copy;
2585 copied = 1;
2587 RTX_FLAG (x, used) = 1;
2589 /* Now scan the subexpressions recursively.
2590 We can store any replaced subexpressions directly into X
2591 since we know X is not shared! Any vectors in X
2592 must be copied if X was copied. */
2594 format_ptr = GET_RTX_FORMAT (code);
2595 length = GET_RTX_LENGTH (code);
2596 last_ptr = NULL;
2598 for (i = 0; i < length; i++)
2600 switch (*format_ptr++)
2602 case 'e':
2603 if (last_ptr)
2604 copy_rtx_if_shared_1 (last_ptr);
2605 last_ptr = &XEXP (x, i);
2606 break;
2608 case 'E':
2609 if (XVEC (x, i) != NULL)
2611 int j;
2612 int len = XVECLEN (x, i);
2614 /* Copy the vector iff we copied the rtx and the length
2615 is nonzero. */
2616 if (copied && len > 0)
2617 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
2619 /* Call recursively on all inside the vector. */
2620 for (j = 0; j < len; j++)
2622 if (last_ptr)
2623 copy_rtx_if_shared_1 (last_ptr);
2624 last_ptr = &XVECEXP (x, i, j);
2627 break;
2630 *orig1 = x;
2631 if (last_ptr)
2633 orig1 = last_ptr;
2634 goto repeat;
2636 return;
2639 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
2640 to look for shared sub-parts. */
2642 void
2643 reset_used_flags (rtx x)
2645 int i, j;
2646 enum rtx_code code;
2647 const char *format_ptr;
2648 int length;
2650 /* Repeat is used to turn tail-recursion into iteration. */
2651 repeat:
2652 if (x == 0)
2653 return;
2655 code = GET_CODE (x);
2657 /* These types may be freely shared so we needn't do any resetting
2658 for them. */
2660 switch (code)
2662 case REG:
2663 case CONST_INT:
2664 case CONST_DOUBLE:
2665 case CONST_VECTOR:
2666 case SYMBOL_REF:
2667 case CODE_LABEL:
2668 case PC:
2669 case CC0:
2670 return;
2672 case INSN:
2673 case JUMP_INSN:
2674 case CALL_INSN:
2675 case NOTE:
2676 case LABEL_REF:
2677 case BARRIER:
2678 /* The chain of insns is not being copied. */
2679 return;
2681 default:
2682 break;
2685 RTX_FLAG (x, used) = 0;
2687 format_ptr = GET_RTX_FORMAT (code);
2688 length = GET_RTX_LENGTH (code);
2690 for (i = 0; i < length; i++)
2692 switch (*format_ptr++)
2694 case 'e':
2695 if (i == length-1)
2697 x = XEXP (x, i);
2698 goto repeat;
2700 reset_used_flags (XEXP (x, i));
2701 break;
2703 case 'E':
2704 for (j = 0; j < XVECLEN (x, i); j++)
2705 reset_used_flags (XVECEXP (x, i, j));
2706 break;
2711 /* Set all the USED bits in X to allow copy_rtx_if_shared to be used
2712 to look for shared sub-parts. */
2714 void
2715 set_used_flags (rtx x)
2717 int i, j;
2718 enum rtx_code code;
2719 const char *format_ptr;
2721 if (x == 0)
2722 return;
2724 code = GET_CODE (x);
2726 /* These types may be freely shared so we needn't do any resetting
2727 for them. */
2729 switch (code)
2731 case REG:
2732 case CONST_INT:
2733 case CONST_DOUBLE:
2734 case CONST_VECTOR:
2735 case SYMBOL_REF:
2736 case CODE_LABEL:
2737 case PC:
2738 case CC0:
2739 return;
2741 case INSN:
2742 case JUMP_INSN:
2743 case CALL_INSN:
2744 case NOTE:
2745 case LABEL_REF:
2746 case BARRIER:
2747 /* The chain of insns is not being copied. */
2748 return;
2750 default:
2751 break;
2754 RTX_FLAG (x, used) = 1;
2756 format_ptr = GET_RTX_FORMAT (code);
2757 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2759 switch (*format_ptr++)
2761 case 'e':
2762 set_used_flags (XEXP (x, i));
2763 break;
2765 case 'E':
2766 for (j = 0; j < XVECLEN (x, i); j++)
2767 set_used_flags (XVECEXP (x, i, j));
2768 break;
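/* reset_used_flags, set_used_flags and copy_rtx_if_shared cooperate
   through the `used' bit.  A minimal unsharing sketch for a single
   insn:

	reset_used_flags (PATTERN (insn));
	reset_used_flags (REG_NOTES (insn));
	PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
	REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));

   Without the reset pass, stale used bits would make every rtx look
   shared and force a copy of everything.  */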
2773 /* Copy X if necessary so that it won't be altered by changes in OTHER.
2774 Return X or the rtx for the pseudo reg the value of X was copied into.
2775 OTHER must be valid as a SET_DEST. */
2778 make_safe_from (rtx x, rtx other)
2780 while (1)
2781 switch (GET_CODE (other))
2783 case SUBREG:
2784 other = SUBREG_REG (other);
2785 break;
2786 case STRICT_LOW_PART:
2787 case SIGN_EXTEND:
2788 case ZERO_EXTEND:
2789 other = XEXP (other, 0);
2790 break;
2791 default:
2792 goto done;
2794 done:
2795 if ((MEM_P (other)
2796 && ! CONSTANT_P (x)
2797 && !REG_P (x)
2798 && GET_CODE (x) != SUBREG)
2799 || (REG_P (other)
2800 && (REGNO (other) < FIRST_PSEUDO_REGISTER
2801 || reg_mentioned_p (other, x))))
2803 rtx temp = gen_reg_rtx (GET_MODE (x));
2804 emit_move_insn (temp, x);
2805 return temp;
2807 return x;
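/* For example, code that is about to store into TARGET while the
   input Y must stay valid could write (sketch):

	y = make_safe_from (y, target);
	emit_move_insn (target, x);

   If Y overlapped TARGET, or TARGET is a hard register mentioned in
   Y, then Y was first copied into a fresh pseudo, so the store
   cannot clobber it.  */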
2810 /* Emission of insns (adding them to the doubly-linked list). */
2812 /* Return the first insn of the current sequence or current function. */
2815 get_insns (void)
2817 return first_insn;
2820 /* Specify a new insn as the first in the chain. */
2822 void
2823 set_first_insn (rtx insn)
2825 if (PREV_INSN (insn) != 0)
2826 abort ();
2827 first_insn = insn;
2830 /* Return the last insn emitted in current sequence or current function. */
2833 get_last_insn (void)
2835 return last_insn;
2838 /* Specify a new insn as the last in the chain. */
2840 void
2841 set_last_insn (rtx insn)
2843 if (NEXT_INSN (insn) != 0)
2844 abort ();
2845 last_insn = insn;
2848 /* Return the last insn emitted, even if it is in a sequence now pushed. */
2851 get_last_insn_anywhere (void)
2853 struct sequence_stack *stack;
2854 if (last_insn)
2855 return last_insn;
2856 for (stack = seq_stack; stack; stack = stack->next)
2857 if (stack->last != 0)
2858 return stack->last;
2859 return 0;
2862 /* Return the first nonnote insn emitted in current sequence or current
2863 function. This routine looks inside SEQUENCEs. */
2866 get_first_nonnote_insn (void)
2868 rtx insn = first_insn;
2870 while (insn)
2872 insn = next_insn (insn);
2873 if (insn == 0 || !NOTE_P (insn))
2874 break;
2877 return insn;
2880 /* Return the last nonnote insn emitted in current sequence or current
2881 function. This routine looks inside SEQUENCEs. */
2884 get_last_nonnote_insn (void)
2886 rtx insn = last_insn;
2888 while (insn)
2890 insn = previous_insn (insn);
2891 if (insn == 0 || !NOTE_P (insn))
2892 break;
2895 return insn;
2898 /* Return a number larger than any instruction's uid in this function. */
2901 get_max_uid (void)
2903 return cur_insn_uid;
2906 /* Renumber instructions so that no instruction UIDs are wasted. */
2908 void
2909 renumber_insns (FILE *stream)
2911 rtx insn;
2913 /* If we're not supposed to renumber instructions, don't. */
2914 if (!flag_renumber_insns)
2915 return;
2917 /* If there aren't that many instructions, then it's not really
2918 worth renumbering them. */
2919 if (flag_renumber_insns == 1 && get_max_uid () < 25000)
2920 return;
2922 cur_insn_uid = 1;
2924 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2926 if (stream)
2927 fprintf (stream, "Renumbering insn %d to %d\n",
2928 INSN_UID (insn), cur_insn_uid);
2929 INSN_UID (insn) = cur_insn_uid++;
2933 /* Return the next insn. If it is a SEQUENCE, return the first insn
2934 of the sequence. */
2937 next_insn (rtx insn)
2939 if (insn)
2941 insn = NEXT_INSN (insn);
2942 if (insn && NONJUMP_INSN_P (insn)
2943 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2944 insn = XVECEXP (PATTERN (insn), 0, 0);
2947 return insn;
2950 /* Return the previous insn. If it is a SEQUENCE, return the last insn
2951 of the sequence. */
2954 previous_insn (rtx insn)
2956 if (insn)
2958 insn = PREV_INSN (insn);
2959 if (insn && NONJUMP_INSN_P (insn)
2960 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2961 insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
2964 return insn;
2967 /* Return the next insn after INSN that is not a NOTE. This routine does not
2968 look inside SEQUENCEs. */
2971 next_nonnote_insn (rtx insn)
2973 while (insn)
2975 insn = NEXT_INSN (insn);
2976 if (insn == 0 || !NOTE_P (insn))
2977 break;
2980 return insn;
2983 /* Return the previous insn before INSN that is not a NOTE. This routine does
2984 not look inside SEQUENCEs. */
2987 prev_nonnote_insn (rtx insn)
2989 while (insn)
2991 insn = PREV_INSN (insn);
2992 if (insn == 0 || !NOTE_P (insn))
2993 break;
2996 return insn;
2999 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3000 or 0, if there is none. This routine does not look inside
3001 SEQUENCEs. */
3004 next_real_insn (rtx insn)
3006 while (insn)
3008 insn = NEXT_INSN (insn);
3009 if (insn == 0 || INSN_P (insn))
3010 break;
3013 return insn;
3016 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3017 or 0, if there is none. This routine does not look inside
3018 SEQUENCEs. */
3021 prev_real_insn (rtx insn)
3023 while (insn)
3025 insn = PREV_INSN (insn);
3026 if (insn == 0 || INSN_P (insn))
3027 break;
3030 return insn;
3033 /* Return the last CALL_INSN in the current list, or 0 if there is none.
3034 This routine does not look inside SEQUENCEs. */
3037 last_call_insn (void)
3039 rtx insn;
3041 for (insn = get_last_insn ();
3042 insn && !CALL_P (insn);
3043 insn = PREV_INSN (insn))
3046 return insn;
3049 /* Find the next insn after INSN that really does something. This routine
3050 does not look inside SEQUENCEs. Until reload has completed, this is the
3051 same as next_real_insn. */
3054 active_insn_p (rtx insn)
3056 return (CALL_P (insn) || JUMP_P (insn)
3057 || (NONJUMP_INSN_P (insn)
3058 && (! reload_completed
3059 || (GET_CODE (PATTERN (insn)) != USE
3060 && GET_CODE (PATTERN (insn)) != CLOBBER))));
3064 next_active_insn (rtx insn)
3066 while (insn)
3068 insn = NEXT_INSN (insn);
3069 if (insn == 0 || active_insn_p (insn))
3070 break;
3073 return insn;
3076 /* Find the last insn before INSN that really does something. This routine
3077 does not look inside SEQUENCEs. Until reload has completed, this is the
3078 same as prev_real_insn. */
3081 prev_active_insn (rtx insn)
3083 while (insn)
3085 insn = PREV_INSN (insn);
3086 if (insn == 0 || active_insn_p (insn))
3087 break;
3090 return insn;
3093 /* Return the next CODE_LABEL after the insn INSN, or 0 if there is none. */
3096 next_label (rtx insn)
3098 while (insn)
3100 insn = NEXT_INSN (insn);
3101 if (insn == 0 || LABEL_P (insn))
3102 break;
3105 return insn;
3108 /* Return the last CODE_LABEL before the insn INSN, or 0 if there is none. */
3111 prev_label (rtx insn)
3113 while (insn)
3115 insn = PREV_INSN (insn);
3116 if (insn == 0 || LABEL_P (insn))
3117 break;
3120 return insn;
3123 /* Return the last label to mark the same position as LABEL. Return null
3124 if LABEL itself is null. */
3127 skip_consecutive_labels (rtx label)
3129 rtx insn;
3131 for (insn = label; insn != 0 && !INSN_P (insn); insn = NEXT_INSN (insn))
3132 if (LABEL_P (insn))
3133 label = insn;
3135 return label;
3138 #ifdef HAVE_cc0
3139 /* INSN uses CC0 and is being moved into a delay slot. Set up REG_CC_SETTER
3140 and REG_CC_USER notes so we can find it. */
3142 void
3143 link_cc0_insns (rtx insn)
3145 rtx user = next_nonnote_insn (insn);
3147 if (NONJUMP_INSN_P (user) && GET_CODE (PATTERN (user)) == SEQUENCE)
3148 user = XVECEXP (PATTERN (user), 0, 0);
3150 REG_NOTES (user) = gen_rtx_INSN_LIST (REG_CC_SETTER, insn,
3151 REG_NOTES (user));
3152 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_CC_USER, user, REG_NOTES (insn));
3155 /* Return the next insn that uses CC0 after INSN, which is assumed to
3156 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3157 applied to the result of this function should yield INSN).
3159 Normally, this is simply the next insn. However, if a REG_CC_USER note
3160 is present, it contains the insn that uses CC0.
3162 Return 0 if we can't find the insn. */
3165 next_cc0_user (rtx insn)
3167 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3169 if (note)
3170 return XEXP (note, 0);
3172 insn = next_nonnote_insn (insn);
3173 if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3174 insn = XVECEXP (PATTERN (insn), 0, 0);
3176 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3177 return insn;
3179 return 0;
3182 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3183 note, it is the previous insn. */
3186 prev_cc0_setter (rtx insn)
3188 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3190 if (note)
3191 return XEXP (note, 0);
3193 insn = prev_nonnote_insn (insn);
3194 if (! sets_cc0_p (PATTERN (insn)))
3195 abort ();
3197 return insn;
3199 #endif
3201 /* Increment the label uses for all labels present in rtx. */
3203 static void
3204 mark_label_nuses (rtx x)
3206 enum rtx_code code;
3207 int i, j;
3208 const char *fmt;
3210 code = GET_CODE (x);
3211 if (code == LABEL_REF && LABEL_P (XEXP (x, 0)))
3212 LABEL_NUSES (XEXP (x, 0))++;
3214 fmt = GET_RTX_FORMAT (code);
3215 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3217 if (fmt[i] == 'e')
3218 mark_label_nuses (XEXP (x, i));
3219 else if (fmt[i] == 'E')
3220 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3221 mark_label_nuses (XVECEXP (x, i, j));
3226 /* Try splitting insns that can be split for better scheduling.
3227 PAT is the pattern which might split.
3228 TRIAL is the insn providing PAT.
3229 LAST is nonzero if we should return the last insn of the sequence produced.
3231 If this routine succeeds in splitting, it returns the first or last
3232 replacement insn depending on the value of LAST. Otherwise, it
3233 returns TRIAL. If the insn to be returned can be split, it will be. */
3236 try_split (rtx pat, rtx trial, int last)
3238 rtx before = PREV_INSN (trial);
3239 rtx after = NEXT_INSN (trial);
3240 int has_barrier = 0;
3241 rtx tem;
3242 rtx note, seq;
3243 int probability;
3244 rtx insn_last, insn;
3245 int njumps = 0;
3247 if (any_condjump_p (trial)
3248 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3249 split_branch_probability = INTVAL (XEXP (note, 0));
3250 probability = split_branch_probability;
3252 seq = split_insns (pat, trial);
3254 split_branch_probability = -1;
3256 /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
3257 We may need to handle this specially. */
3258 if (after && BARRIER_P (after))
3260 has_barrier = 1;
3261 after = NEXT_INSN (after);
3264 if (!seq)
3265 return trial;
3267 /* Avoid infinite loop if any insn of the result matches
3268 the original pattern. */
3269 insn_last = seq;
3270 while (1)
3272 if (INSN_P (insn_last)
3273 && rtx_equal_p (PATTERN (insn_last), pat))
3274 return trial;
3275 if (!NEXT_INSN (insn_last))
3276 break;
3277 insn_last = NEXT_INSN (insn_last);
3280 /* Mark labels. */
3281 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3283 if (JUMP_P (insn))
3285 mark_jump_label (PATTERN (insn), insn, 0);
3286 njumps++;
3287 if (probability != -1
3288 && any_condjump_p (insn)
3289 && !find_reg_note (insn, REG_BR_PROB, 0))
3291 /* We can preserve the REG_BR_PROB notes only if exactly
3292 one jump is created; otherwise the machine description
3293 is responsible for this step, using the
3294 split_branch_probability variable. */
3295 if (njumps != 1)
3296 abort ();
3297 REG_NOTES (insn)
3298 = gen_rtx_EXPR_LIST (REG_BR_PROB,
3299 GEN_INT (probability),
3300 REG_NOTES (insn));
3305 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3306 in SEQ and copy our CALL_INSN_FUNCTION_USAGE to it. */
3307 if (CALL_P (trial))
3309 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3310 if (CALL_P (insn))
3312 rtx *p = &CALL_INSN_FUNCTION_USAGE (insn);
3313 while (*p)
3314 p = &XEXP (*p, 1);
3315 *p = CALL_INSN_FUNCTION_USAGE (trial);
3316 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
3320 /* Copy notes, particularly those related to the CFG. */
3321 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3323 switch (REG_NOTE_KIND (note))
3325 case REG_EH_REGION:
3326 insn = insn_last;
3327 while (insn != NULL_RTX)
3329 if (CALL_P (insn)
3330 || (flag_non_call_exceptions && INSN_P (insn)
3331 && may_trap_p (PATTERN (insn))))
3332 REG_NOTES (insn)
3333 = gen_rtx_EXPR_LIST (REG_EH_REGION,
3334 XEXP (note, 0),
3335 REG_NOTES (insn));
3336 insn = PREV_INSN (insn);
3338 break;
3340 case REG_NORETURN:
3341 case REG_SETJMP:
3342 case REG_ALWAYS_RETURN:
3343 insn = insn_last;
3344 while (insn != NULL_RTX)
3346 if (CALL_P (insn))
3347 REG_NOTES (insn)
3348 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3349 XEXP (note, 0),
3350 REG_NOTES (insn));
3351 insn = PREV_INSN (insn);
3353 break;
3355 case REG_NON_LOCAL_GOTO:
3356 insn = insn_last;
3357 while (insn != NULL_RTX)
3359 if (JUMP_P (insn))
3360 REG_NOTES (insn)
3361 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3362 XEXP (note, 0),
3363 REG_NOTES (insn));
3364 insn = PREV_INSN (insn);
3366 break;
3368 default:
3369 break;
3373 /* If there are LABELS inside the split insns increment the
3374 usage count so we don't delete the label. */
3375 if (NONJUMP_INSN_P (trial))
3377 insn = insn_last;
3378 while (insn != NULL_RTX)
3380 if (NONJUMP_INSN_P (insn))
3381 mark_label_nuses (PATTERN (insn));
3383 insn = PREV_INSN (insn);
3387 tem = emit_insn_after_setloc (seq, trial, INSN_LOCATOR (trial));
3389 delete_insn (trial);
3390 if (has_barrier)
3391 emit_barrier_after (tem);
3393 /* Recursively call try_split for each new insn created; by the
3394 time control returns here that insn will be fully split, so
3395 set LAST and continue from the insn after the one returned.
3396 We can't use next_active_insn here since AFTER may be a note.
3397 Ignore deleted insns, which can occur if not optimizing. */
3398 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3399 if (! INSN_DELETED_P (tem) && INSN_P (tem))
3400 tem = try_split (PATTERN (tem), tem, 1);
3402 /* Return either the first or the last insn, depending on which was
3403 requested. */
3404 return last
3405 ? (after ? PREV_INSN (after) : last_insn)
3406 : NEXT_INSN (before);
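/* The usual caller is the insn-splitting pass, which walks the whole
   stream and retries each insn (sketch, in the spirit of
   split_all_insns):

	for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
	  if (INSN_P (insn))
	    insn = try_split (PATTERN (insn), insn, 1);

   Passing 1 for LAST makes the walk resume after the final
   replacement insn; the recursion above guarantees those
   replacements are already fully split.  */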
3409 /* Make and return an INSN rtx, initializing all its slots.
3410 Store PATTERN in the pattern slots. */
3413 make_insn_raw (rtx pattern)
3415 rtx insn;
3417 insn = rtx_alloc (INSN);
3419 INSN_UID (insn) = cur_insn_uid++;
3420 PATTERN (insn) = pattern;
3421 INSN_CODE (insn) = -1;
3422 LOG_LINKS (insn) = NULL;
3423 REG_NOTES (insn) = NULL;
3424 INSN_LOCATOR (insn) = 0;
3425 BLOCK_FOR_INSN (insn) = NULL;
3427 #ifdef ENABLE_RTL_CHECKING
3428 if (insn
3429 && INSN_P (insn)
3430 && (returnjump_p (insn)
3431 || (GET_CODE (insn) == SET
3432 && SET_DEST (insn) == pc_rtx)))
3434 warning ("ICE: emit_insn used where emit_jump_insn needed:\n");
3435 debug_rtx (insn);
3437 #endif
3439 return insn;
3442 /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
3444 static rtx
3445 make_jump_insn_raw (rtx pattern)
3447 rtx insn;
3449 insn = rtx_alloc (JUMP_INSN);
3450 INSN_UID (insn) = cur_insn_uid++;
3452 PATTERN (insn) = pattern;
3453 INSN_CODE (insn) = -1;
3454 LOG_LINKS (insn) = NULL;
3455 REG_NOTES (insn) = NULL;
3456 JUMP_LABEL (insn) = NULL;
3457 INSN_LOCATOR (insn) = 0;
3458 BLOCK_FOR_INSN (insn) = NULL;
3460 return insn;
3463 /* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
3465 static rtx
3466 make_call_insn_raw (rtx pattern)
3468 rtx insn;
3470 insn = rtx_alloc (CALL_INSN);
3471 INSN_UID (insn) = cur_insn_uid++;
3473 PATTERN (insn) = pattern;
3474 INSN_CODE (insn) = -1;
3475 LOG_LINKS (insn) = NULL;
3476 REG_NOTES (insn) = NULL;
3477 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
3478 INSN_LOCATOR (insn) = 0;
3479 BLOCK_FOR_INSN (insn) = NULL;
3481 return insn;
3484 /* Add INSN to the end of the doubly-linked list.
3485 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
3487 void
3488 add_insn (rtx insn)
3490 PREV_INSN (insn) = last_insn;
3491 NEXT_INSN (insn) = 0;
3493 if (NULL != last_insn)
3494 NEXT_INSN (last_insn) = insn;
3496 if (NULL == first_insn)
3497 first_insn = insn;
3499 last_insn = insn;
3502 /* Add INSN into the doubly-linked list after insn AFTER. This and
3503 the next should be the only functions called to insert an insn once
3504 delay slots have been filled since only they know how to update a
3505 SEQUENCE. */
3507 void
3508 add_insn_after (rtx insn, rtx after)
3510 rtx next = NEXT_INSN (after);
3511 basic_block bb;
3513 if (optimize && INSN_DELETED_P (after))
3514 abort ();
3516 NEXT_INSN (insn) = next;
3517 PREV_INSN (insn) = after;
3519 if (next)
3521 PREV_INSN (next) = insn;
3522 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3523 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
3525 else if (last_insn == after)
3526 last_insn = insn;
3527 else
3529 struct sequence_stack *stack = seq_stack;
3530 /* Scan all pending sequences too. */
3531 for (; stack; stack = stack->next)
3532 if (after == stack->last)
3534 stack->last = insn;
3535 break;
3538 if (stack == 0)
3539 abort ();
3542 if (!BARRIER_P (after)
3543 && !BARRIER_P (insn)
3544 && (bb = BLOCK_FOR_INSN (after)))
3546 set_block_for_insn (insn, bb);
3547 if (INSN_P (insn))
3548 bb->flags |= BB_DIRTY;
3549 /* This should not happen, as the first insn in a BB is always
3550 either a NOTE or a LABEL. */
3551 if (BB_END (bb) == after
3552 /* Avoid clobbering of structure when creating new BB. */
3553 && !BARRIER_P (insn)
3554 && (!NOTE_P (insn)
3555 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
3556 BB_END (bb) = insn;
3559 NEXT_INSN (after) = insn;
3560 if (NONJUMP_INSN_P (after) && GET_CODE (PATTERN (after)) == SEQUENCE)
3562 rtx sequence = PATTERN (after);
3563 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3567 /* Add INSN into the doubly-linked list before insn BEFORE. This and
3568 the previous should be the only functions called to insert an insn once
3569 delay slots have been filled since only they know how to update a
3570 SEQUENCE. */
3572 void
3573 add_insn_before (rtx insn, rtx before)
3575 rtx prev = PREV_INSN (before);
3576 basic_block bb;
3578 if (optimize && INSN_DELETED_P (before))
3579 abort ();
3581 PREV_INSN (insn) = prev;
3582 NEXT_INSN (insn) = before;
3584 if (prev)
3586 NEXT_INSN (prev) = insn;
3587 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3589 rtx sequence = PATTERN (prev);
3590 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3593 else if (first_insn == before)
3594 first_insn = insn;
3595 else
3597 struct sequence_stack *stack = seq_stack;
3598 /* Scan all pending sequences too. */
3599 for (; stack; stack = stack->next)
3600 if (before == stack->first)
3602 stack->first = insn;
3603 break;
3606 if (stack == 0)
3607 abort ();
3610 if (!BARRIER_P (before)
3611 && !BARRIER_P (insn)
3612 && (bb = BLOCK_FOR_INSN (before)))
3614 set_block_for_insn (insn, bb);
3615 if (INSN_P (insn))
3616 bb->flags |= BB_DIRTY;
3617 /* This should not happen, as the first insn in a BB is always
3618 either a NOTE or a LABEL. */
3619 if (BB_HEAD (bb) == insn
3620 /* Avoid clobbering of structure when creating new BB. */
3621 && !BARRIER_P (insn)
3622 && (!NOTE_P (insn)
3623 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
3624 abort ();
3627 PREV_INSN (before) = insn;
3628 if (NONJUMP_INSN_P (before) && GET_CODE (PATTERN (before)) == SEQUENCE)
3629 PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
3632 /* Remove an insn from its doubly-linked list. This function knows how
3633 to handle sequences. */
3634 void
3635 remove_insn (rtx insn)
3637 rtx next = NEXT_INSN (insn);
3638 rtx prev = PREV_INSN (insn);
3639 basic_block bb;
3641 if (prev)
3643 NEXT_INSN (prev) = next;
3644 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3646 rtx sequence = PATTERN (prev);
3647 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
3650 else if (first_insn == insn)
3651 first_insn = next;
3652 else
3654 struct sequence_stack *stack = seq_stack;
3655 /* Scan all pending sequences too. */
3656 for (; stack; stack = stack->next)
3657 if (insn == stack->first)
3659 stack->first = next;
3660 break;
3663 if (stack == 0)
3664 abort ();
3667 if (next)
3669 PREV_INSN (next) = prev;
3670 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3671 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
3673 else if (last_insn == insn)
3674 last_insn = prev;
3675 else
3677 struct sequence_stack *stack = seq_stack;
3678 /* Scan all pending sequences too. */
3679 for (; stack; stack = stack->next)
3680 if (insn == stack->last)
3682 stack->last = prev;
3683 break;
3686 if (stack == 0)
3687 abort ();
3689 if (!BARRIER_P (insn)
3690 && (bb = BLOCK_FOR_INSN (insn)))
3692 if (INSN_P (insn))
3693 bb->flags |= BB_DIRTY;
3694 if (BB_HEAD (bb) == insn)
3696 /* Never ever delete the basic block note without deleting whole
3697 basic block. */
3698 if (NOTE_P (insn))
3699 abort ();
3700 BB_HEAD (bb) = next;
3702 if (BB_END (bb) == insn)
3703 BB_END (bb) = prev;
3707 /* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
3709 void
3710 add_function_usage_to (rtx call_insn, rtx call_fusage)
3712 if (! call_insn || !CALL_P (call_insn))
3713 abort ();
3715 /* Put the register usage information on the CALL. If there is already
3716 some usage information, put ours at the end. */
3717 if (CALL_INSN_FUNCTION_USAGE (call_insn))
3719 rtx link;
3721 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
3722 link = XEXP (link, 1))
3725 XEXP (link, 1) = call_fusage;
3727 else
3728 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
3731 /* Delete all insns made since FROM.
3732 FROM becomes the new last instruction. */
3734 void
3735 delete_insns_since (rtx from)
3737 if (from == 0)
3738 first_insn = 0;
3739 else
3740 NEXT_INSN (from) = 0;
3741 last_insn = from;
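/* This makes a simple checkpoint/rollback idiom possible when
   emitting code speculatively (sketch; OP0, OP1 and TARGET are
   assumed to be valid operands):

	rtx last = get_last_insn ();
	rtx result = expand_binop (mode, add_optab, op0, op1,
				   target, 0, OPTAB_DIRECT);
	if (result == 0)
	  delete_insns_since (last);

   Everything emitted after LAST is unlinked and LAST becomes the
   last instruction again.  */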
3744 /* This function is deprecated; please use sequences instead.
3746 Move a consecutive bunch of insns to a different place in the chain.
3747 The insns to be moved are those between FROM and TO.
3748 They are moved to a new position after the insn AFTER.
3749 AFTER must not be FROM or TO or any insn in between.
3751 This function does not know about SEQUENCEs and hence should not be
3752 called after delay-slot filling has been done. */
3754 void
3755 reorder_insns_nobb (rtx from, rtx to, rtx after)
3757 /* Splice this bunch out of where it is now. */
3758 if (PREV_INSN (from))
3759 NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
3760 if (NEXT_INSN (to))
3761 PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
3762 if (last_insn == to)
3763 last_insn = PREV_INSN (from);
3764 if (first_insn == from)
3765 first_insn = NEXT_INSN (to);
3767 /* Make the new neighbors point to it and it to them. */
3768 if (NEXT_INSN (after))
3769 PREV_INSN (NEXT_INSN (after)) = to;
3771 NEXT_INSN (to) = NEXT_INSN (after);
3772 PREV_INSN (from) = after;
3773 NEXT_INSN (after) = from;
3774 if (after == last_insn)
3775 last_insn = to;
3778 /* Same as function above, but take care to update BB boundaries. */
3779 void
3780 reorder_insns (rtx from, rtx to, rtx after)
3782 rtx prev = PREV_INSN (from);
3783 basic_block bb, bb2;
3785 reorder_insns_nobb (from, to, after);
3787 if (!BARRIER_P (after)
3788 && (bb = BLOCK_FOR_INSN (after)))
3790 rtx x;
3791 bb->flags |= BB_DIRTY;
3793 if (!BARRIER_P (from)
3794 && (bb2 = BLOCK_FOR_INSN (from)))
3796 if (BB_END (bb2) == to)
3797 BB_END (bb2) = prev;
3798 bb2->flags |= BB_DIRTY;
3801 if (BB_END (bb) == after)
3802 BB_END (bb) = to;
3804 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
3805 if (!BARRIER_P (x))
3806 set_block_for_insn (x, bb);
3810 /* Return the line note insn preceding INSN. */
3812 static rtx
3813 find_line_note (rtx insn)
3815 if (no_line_numbers)
3816 return 0;
3818 for (; insn; insn = PREV_INSN (insn))
3819 if (NOTE_P (insn)
3820 && NOTE_LINE_NUMBER (insn) >= 0)
3821 break;
3823 return insn;
3826 /* Remove unnecessary notes from the instruction stream. */
3828 void
3829 remove_unnecessary_notes (void)
3831 rtx block_stack = NULL_RTX;
3832 rtx eh_stack = NULL_RTX;
3833 rtx insn;
3834 rtx next;
3835 rtx tmp;
3837 /* We must not remove the first instruction in the function because
3838 the compiler depends on the first instruction being a note. */
3839 for (insn = NEXT_INSN (get_insns ()); insn; insn = next)
3841 /* Remember what's next. */
3842 next = NEXT_INSN (insn);
3844 /* We're only interested in notes. */
3845 if (!NOTE_P (insn))
3846 continue;
3848 switch (NOTE_LINE_NUMBER (insn))
3850 case NOTE_INSN_DELETED:
3851 remove_insn (insn);
3852 break;
3854 case NOTE_INSN_EH_REGION_BEG:
3855 eh_stack = alloc_INSN_LIST (insn, eh_stack);
3856 break;
3858 case NOTE_INSN_EH_REGION_END:
3859 /* Too many end notes. */
3860 if (eh_stack == NULL_RTX)
3861 abort ();
3862 /* Mismatched nesting. */
3863 if (NOTE_EH_HANDLER (XEXP (eh_stack, 0)) != NOTE_EH_HANDLER (insn))
3864 abort ();
3865 tmp = eh_stack;
3866 eh_stack = XEXP (eh_stack, 1);
3867 free_INSN_LIST_node (tmp);
3868 break;
3870 case NOTE_INSN_BLOCK_BEG:
3871 /* By now, all notes indicating lexical blocks should have
3872 NOTE_BLOCK filled in. */
3873 if (NOTE_BLOCK (insn) == NULL_TREE)
3874 abort ();
3875 block_stack = alloc_INSN_LIST (insn, block_stack);
3876 break;
3878 case NOTE_INSN_BLOCK_END:
3879 /* Too many end notes. */
3880 if (block_stack == NULL_RTX)
3881 abort ();
3882 /* Mismatched nesting. */
3883 if (NOTE_BLOCK (XEXP (block_stack, 0)) != NOTE_BLOCK (insn))
3884 abort ();
3885 tmp = block_stack;
3886 block_stack = XEXP (block_stack, 1);
3887 free_INSN_LIST_node (tmp);
3889 /* Scan back to see if there are any non-note instructions
3890 between INSN and the beginning of this block. If not,
3891 then there is no PC range in the generated code that will
3892 actually be in this block, so there's no point in
3893 remembering the existence of the block. */
3894 for (tmp = PREV_INSN (insn); tmp; tmp = PREV_INSN (tmp))
3896 /* This block contains a real instruction. Note that we
3897 don't include labels; if the only thing in the block
3898 is a label, then there are still no PC values that
3899 lie within the block. */
3900 if (INSN_P (tmp))
3901 break;
3903 /* We're only interested in NOTEs. */
3904 if (!NOTE_P (tmp))
3905 continue;
3907 if (NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BLOCK_BEG)
3909 /* We just verified that this BLOCK matches us with
3910 the block_stack check above. Never delete the
3911 BLOCK for the outermost scope of the function; we
3912 can refer to names from that scope even if the
3913 block notes are messed up. */
3914 if (! is_body_block (NOTE_BLOCK (insn))
3915 && (*debug_hooks->ignore_block) (NOTE_BLOCK (insn)))
3917 remove_insn (tmp);
3918 remove_insn (insn);
3920 break;
3922 else if (NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BLOCK_END)
3923 /* There's a nested block. We need to leave the
3924 current block in place since otherwise the debugger
3925 wouldn't be able to show symbols from our block in
3926 the nested block. */
3927 break;
3932 /* Too many begin notes. */
3933 if (block_stack || eh_stack)
3934 abort ();
3938 /* Emit insn(s) of given code and pattern
3939 at a specified place within the doubly-linked list.
3941 All of the emit_foo global entry points accept an object
3942 X which is either an insn list or a PATTERN of a single
3943 instruction.
3945 There are thus a few canonical ways to generate code and
3946 emit it at a specific place in the instruction stream. For
3947 example, consider the instruction named SPOT and the fact that
3948 we would like to emit some instructions before SPOT. We might
3949 do it like this:
3951 start_sequence ();
3952 ... emit the new instructions ...
3953 insns_head = get_insns ();
3954 end_sequence ();
3956 emit_insn_before (insns_head, SPOT);
3958 It used to be common to generate SEQUENCE rtl instead, but that
3959 is a relic of the past which no longer occurs. The reason is that
3960 SEQUENCE rtl results in badly fragmented RTL memory, since the SEQUENCE
3961 generated would almost certainly die right after it was created. */
3963 /* Make X be output before the instruction BEFORE. */
3966 emit_insn_before (rtx x, rtx before)
3968 rtx last = before;
3969 rtx insn;
3971 #ifdef ENABLE_RTL_CHECKING
3972 if (before == NULL_RTX)
3973 abort ();
3974 #endif
3976 if (x == NULL_RTX)
3977 return last;
3979 switch (GET_CODE (x))
3981 case INSN:
3982 case JUMP_INSN:
3983 case CALL_INSN:
3984 case CODE_LABEL:
3985 case BARRIER:
3986 case NOTE:
3987 insn = x;
3988 while (insn)
3990 rtx next = NEXT_INSN (insn);
3991 add_insn_before (insn, before);
3992 last = insn;
3993 insn = next;
3995 break;
3997 #ifdef ENABLE_RTL_CHECKING
3998 case SEQUENCE:
3999 abort ();
4000 break;
4001 #endif
4003 default:
4004 last = make_insn_raw (x);
4005 add_insn_before (last, before);
4006 break;
4009 return last;
4012 /* Make an instruction with body X and code JUMP_INSN
4013 and output it before the instruction BEFORE. */
4016 emit_jump_insn_before (rtx x, rtx before)
4018 rtx insn, last = NULL_RTX;
4020 #ifdef ENABLE_RTL_CHECKING
4021 if (before == NULL_RTX)
4022 abort ();
4023 #endif
4025 switch (GET_CODE (x))
4027 case INSN:
4028 case JUMP_INSN:
4029 case CALL_INSN:
4030 case CODE_LABEL:
4031 case BARRIER:
4032 case NOTE:
4033 insn = x;
4034 while (insn)
4036 rtx next = NEXT_INSN (insn);
4037 add_insn_before (insn, before);
4038 last = insn;
4039 insn = next;
4041 break;
4043 #ifdef ENABLE_RTL_CHECKING
4044 case SEQUENCE:
4045 abort ();
4046 break;
4047 #endif
4049 default:
4050 last = make_jump_insn_raw (x);
4051 add_insn_before (last, before);
4052 break;
4055 return last;
4058 /* Make an instruction with body X and code CALL_INSN
4059 and output it before the instruction BEFORE. */
4062 emit_call_insn_before (rtx x, rtx before)
4064 rtx last = NULL_RTX, insn;
4066 #ifdef ENABLE_RTL_CHECKING
4067 if (before == NULL_RTX)
4068 abort ();
4069 #endif
4071 switch (GET_CODE (x))
4073 case INSN:
4074 case JUMP_INSN:
4075 case CALL_INSN:
4076 case CODE_LABEL:
4077 case BARRIER:
4078 case NOTE:
4079 insn = x;
4080 while (insn)
4082 rtx next = NEXT_INSN (insn);
4083 add_insn_before (insn, before);
4084 last = insn;
4085 insn = next;
4087 break;
4089 #ifdef ENABLE_RTL_CHECKING
4090 case SEQUENCE:
4091 abort ();
4092 break;
4093 #endif
4095 default:
4096 last = make_call_insn_raw (x);
4097 add_insn_before (last, before);
4098 break;
4101 return last;
4104 /* Make an insn of code BARRIER
4105 and output it before the insn BEFORE. */
4108 emit_barrier_before (rtx before)
4110 rtx insn = rtx_alloc (BARRIER);
4112 INSN_UID (insn) = cur_insn_uid++;
4114 add_insn_before (insn, before);
4115 return insn;
4118 /* Emit the label LABEL before the insn BEFORE. */
4121 emit_label_before (rtx label, rtx before)
4123 /* This can be called twice for the same label as a result of the
4124 confusion that follows a syntax error! So make it harmless. */
4125 if (INSN_UID (label) == 0)
4127 INSN_UID (label) = cur_insn_uid++;
4128 add_insn_before (label, before);
4131 return label;
4134 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4137 emit_note_before (int subtype, rtx before)
4139 rtx note = rtx_alloc (NOTE);
4140 INSN_UID (note) = cur_insn_uid++;
4141 #ifndef USE_MAPPED_LOCATION
4142 NOTE_SOURCE_FILE (note) = 0;
4143 #endif
4144 NOTE_LINE_NUMBER (note) = subtype;
4145 BLOCK_FOR_INSN (note) = NULL;
4147 add_insn_before (note, before);
4148 return note;
4151 /* Helper for emit_insn_after, handles lists of instructions
4152 efficiently. */
4154 static rtx emit_insn_after_1 (rtx, rtx);
4156 static rtx
4157 emit_insn_after_1 (rtx first, rtx after)
4159 rtx last;
4160 rtx after_after;
4161 basic_block bb;
4163 if (!BARRIER_P (after)
4164 && (bb = BLOCK_FOR_INSN (after)))
4166 bb->flags |= BB_DIRTY;
4167 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4168 if (!BARRIER_P (last))
4169 set_block_for_insn (last, bb);
4170 if (!BARRIER_P (last))
4171 set_block_for_insn (last, bb);
4172 if (BB_END (bb) == after)
4173 BB_END (bb) = last;
4175 else
4176 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4177 continue;
4179 after_after = NEXT_INSN (after);
4181 NEXT_INSN (after) = first;
4182 PREV_INSN (first) = after;
4183 NEXT_INSN (last) = after_after;
4184 if (after_after)
4185 PREV_INSN (after_after) = last;
4187 if (after == last_insn)
4188 last_insn = last;
4189 return last;
4192 /* Make X be output after the insn AFTER. */
4195 emit_insn_after (rtx x, rtx after)
4197 rtx last = after;
4199 #ifdef ENABLE_RTL_CHECKING
4200 if (after == NULL_RTX)
4201 abort ();
4202 #endif
4204 if (x == NULL_RTX)
4205 return last;
4207 switch (GET_CODE (x))
4209 case INSN:
4210 case JUMP_INSN:
4211 case CALL_INSN:
4212 case CODE_LABEL:
4213 case BARRIER:
4214 case NOTE:
4215 last = emit_insn_after_1 (x, after);
4216 break;
4218 #ifdef ENABLE_RTL_CHECKING
4219 case SEQUENCE:
4220 abort ();
4221 break;
4222 #endif
4224 default:
4225 last = make_insn_raw (x);
4226 add_insn_after (last, after);
4227 break;
4230 return last;
4233 /* Similar to emit_insn_after, except that line notes are to be inserted so
4234 as to act as if this insn were at FROM. */
4236 void
4237 emit_insn_after_with_line_notes (rtx x, rtx after, rtx from)
4239 rtx from_line = find_line_note (from);
4240 rtx after_line = find_line_note (after);
4241 rtx insn = emit_insn_after (x, after);
4243 if (from_line)
4244 emit_note_copy_after (from_line, after);
4246 if (after_line)
4247 emit_note_copy_after (after_line, insn);
4250 /* Make an insn of code JUMP_INSN with body X
4251 and output it after the insn AFTER. */
4254 emit_jump_insn_after (rtx x, rtx after)
4256 rtx last;
4258 #ifdef ENABLE_RTL_CHECKING
4259 if (after == NULL_RTX)
4260 abort ();
4261 #endif
4263 switch (GET_CODE (x))
4265 case INSN:
4266 case JUMP_INSN:
4267 case CALL_INSN:
4268 case CODE_LABEL:
4269 case BARRIER:
4270 case NOTE:
4271 last = emit_insn_after_1 (x, after);
4272 break;
4274 #ifdef ENABLE_RTL_CHECKING
4275 case SEQUENCE:
4276 abort ();
4277 break;
4278 #endif
4280 default:
4281 last = make_jump_insn_raw (x);
4282 add_insn_after (last, after);
4283 break;
4286 return last;
4289 /* Make an instruction with body X and code CALL_INSN
4290 and output it after the instruction AFTER. */
4293 emit_call_insn_after (rtx x, rtx after)
4295 rtx last;
4297 #ifdef ENABLE_RTL_CHECKING
4298 if (after == NULL_RTX)
4299 abort ();
4300 #endif
4302 switch (GET_CODE (x))
4304 case INSN:
4305 case JUMP_INSN:
4306 case CALL_INSN:
4307 case CODE_LABEL:
4308 case BARRIER:
4309 case NOTE:
4310 last = emit_insn_after_1 (x, after);
4311 break;
4313 #ifdef ENABLE_RTL_CHECKING
4314 case SEQUENCE:
4315 abort ();
4316 break;
4317 #endif
4319 default:
4320 last = make_call_insn_raw (x);
4321 add_insn_after (last, after);
4322 break;
4325 return last;
4328 /* Make an insn of code BARRIER
4329 and output it after the insn AFTER. */
4332 emit_barrier_after (rtx after)
4334 rtx insn = rtx_alloc (BARRIER);
4336 INSN_UID (insn) = cur_insn_uid++;
4338 add_insn_after (insn, after);
4339 return insn;
4342 /* Emit the label LABEL after the insn AFTER. */
4345 emit_label_after (rtx label, rtx after)
4347 /* This can be called twice for the same label
4348 as a result of the confusion that follows a syntax error!
4349 So make it harmless. */
4350 if (INSN_UID (label) == 0)
4352 INSN_UID (label) = cur_insn_uid++;
4353 add_insn_after (label, after);
4356 return label;
4359 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
4362 emit_note_after (int subtype, rtx after)
4364 rtx note = rtx_alloc (NOTE);
4365 INSN_UID (note) = cur_insn_uid++;
4366 #ifndef USE_MAPPED_LOCATION
4367 NOTE_SOURCE_FILE (note) = 0;
4368 #endif
4369 NOTE_LINE_NUMBER (note) = subtype;
4370 BLOCK_FOR_INSN (note) = NULL;
4371 add_insn_after (note, after);
4372 return note;
4375 /* Emit a copy of note ORIG after the insn AFTER. */
4378 emit_note_copy_after (rtx orig, rtx after)
4380 rtx note;
4382 if (NOTE_LINE_NUMBER (orig) >= 0 && no_line_numbers)
4384 cur_insn_uid++;
4385 return 0;
4388 note = rtx_alloc (NOTE);
4389 INSN_UID (note) = cur_insn_uid++;
4390 NOTE_LINE_NUMBER (note) = NOTE_LINE_NUMBER (orig);
4391 NOTE_DATA (note) = NOTE_DATA (orig);
4392 BLOCK_FOR_INSN (note) = NULL;
4393 add_insn_after (note, after);
4394 return note;
4397 /* Like emit_insn_after, but set INSN_LOCATOR according to LOC. */
4399 emit_insn_after_setloc (rtx pattern, rtx after, int loc)
4401 rtx last = emit_insn_after (pattern, after);
4403 if (pattern == NULL_RTX)
4404 return last;
4406 after = NEXT_INSN (after);
4407 while (1)
4409 if (active_insn_p (after))
4410 INSN_LOCATOR (after) = loc;
4411 if (after == last)
4412 break;
4413 after = NEXT_INSN (after);
4415 return last;
4418 /* Like emit_jump_insn_after, but set INSN_LOCATOR according to LOC. */
4420 emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
4422 rtx last = emit_jump_insn_after (pattern, after);
4424 if (pattern == NULL_RTX)
4425 return last;
4427 after = NEXT_INSN (after);
4428 while (1)
4430 if (active_insn_p (after))
4431 INSN_LOCATOR (after) = loc;
4432 if (after == last)
4433 break;
4434 after = NEXT_INSN (after);
4436 return last;
4439 /* Like emit_call_insn_after, but set INSN_LOCATOR according to LOC. */
4441 emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
4443 rtx last = emit_call_insn_after (pattern, after);
4445 if (pattern == NULL_RTX)
4446 return last;
4448 after = NEXT_INSN (after);
4449 while (1)
4451 if (active_insn_p (after))
4452 INSN_LOCATOR (after) = loc;
4453 if (after == last)
4454 break;
4455 after = NEXT_INSN (after);
4457 return last;
4460 /* Like emit_insn_before, but set INSN_LOCATOR according to LOC. */
4462 emit_insn_before_setloc (rtx pattern, rtx before, int loc)
4464 rtx first = PREV_INSN (before);
4465 rtx last = emit_insn_before (pattern, before);
4467 if (pattern == NULL_RTX)
4468 return last;
4470 first = NEXT_INSN (first);
4471 while (1)
4473 if (active_insn_p (first))
4474 INSN_LOCATOR (first) = loc;
4475 if (first == last)
4476 break;
4477 first = NEXT_INSN (first);
4479 return last;
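/* Usage sketch (illustrative only, not part of emit-rtl.c): the
   *_setloc variants are convenient when splicing a replacement insn
   into an existing stream, so that its debug locator matches the insn
   it conceptually stands for.  INSN here is hypothetical.  */
#if 0
  rtx pat = gen_rtx_SET (VOIDmode, gen_reg_rtx (SImode), const0_rtx);
  emit_insn_before_setloc (pat, insn, INSN_LOCATOR (insn));
#endif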
4482 /* Take X and emit it at the end of the doubly-linked
4483 INSN list.
4485 Returns the last insn emitted. */
4488 emit_insn (rtx x)
4490 rtx last = last_insn;
4491 rtx insn;
4493 if (x == NULL_RTX)
4494 return last;
4496 switch (GET_CODE (x))
4498 case INSN:
4499 case JUMP_INSN:
4500 case CALL_INSN:
4501 case CODE_LABEL:
4502 case BARRIER:
4503 case NOTE:
4504 insn = x;
4505 while (insn)
4507 rtx next = NEXT_INSN (insn);
4508 add_insn (insn);
4509 last = insn;
4510 insn = next;
4512 break;
4514 #ifdef ENABLE_RTL_CHECKING
4515 case SEQUENCE:
4516 abort ();
4517 break;
4518 #endif
4520 default:
4521 last = make_insn_raw (x);
4522 add_insn (last);
4523 break;
4526 return last;
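/* Usage sketch (illustrative only): a bare SET pattern handed to
   emit_insn is wrapped in an INSN by make_insn_raw and linked after
   last_insn; a chain of already-made insns is linked member by
   member.  The pseudo register here is hypothetical.  */
#if 0
  rtx dest = gen_reg_rtx (SImode);
  emit_insn (gen_rtx_SET (VOIDmode, dest, const1_rtx));
#endif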
4529 /* Make an insn of code JUMP_INSN with pattern X
4530 and add it to the end of the doubly-linked list. */
4533 emit_jump_insn (rtx x)
4535 rtx last = NULL_RTX, insn;
4537 switch (GET_CODE (x))
4539 case INSN:
4540 case JUMP_INSN:
4541 case CALL_INSN:
4542 case CODE_LABEL:
4543 case BARRIER:
4544 case NOTE:
4545 insn = x;
4546 while (insn)
4548 rtx next = NEXT_INSN (insn);
4549 add_insn (insn);
4550 last = insn;
4551 insn = next;
4553 break;
4555 #ifdef ENABLE_RTL_CHECKING
4556 case SEQUENCE:
4557 abort ();
4558 break;
4559 #endif
4561 default:
4562 last = make_jump_insn_raw (x);
4563 add_insn (last);
4564 break;
4567 return last;
4570 /* Make an insn of code CALL_INSN with pattern X
4571 and add it to the end of the doubly-linked list. */
4574 emit_call_insn (rtx x)
4576 rtx insn;
4578 switch (GET_CODE (x))
4580 case INSN:
4581 case JUMP_INSN:
4582 case CALL_INSN:
4583 case CODE_LABEL:
4584 case BARRIER:
4585 case NOTE:
4586 insn = emit_insn (x);
4587 break;
4589 #ifdef ENABLE_RTL_CHECKING
4590 case SEQUENCE:
4591 abort ();
4592 break;
4593 #endif
4595 default:
4596 insn = make_call_insn_raw (x);
4597 add_insn (insn);
4598 break;
4601 return insn;
4604 /* Add the label LABEL to the end of the doubly-linked list. */
4607 emit_label (rtx label)
4609 /* This can be called twice for the same label
4610 as a result of the confusion that follows a syntax error!
4611 So make it harmless. */
4612 if (INSN_UID (label) == 0)
4614 INSN_UID (label) = cur_insn_uid++;
4615 add_insn (label);
4617 return label;
4620 /* Make an insn of code BARRIER
4621 and add it to the end of the doubly-linked list. */
4624 emit_barrier (void)
4626 rtx barrier = rtx_alloc (BARRIER);
4627 INSN_UID (barrier) = cur_insn_uid++;
4628 add_insn (barrier);
4629 return barrier;
4632 /* Make a line-numbering NOTE insn for LOCATION and add it to the end
4633 of the doubly-linked list, but only if line numbers are desired for
4634 debugging info and LOCATION doesn't match the previous one. */
4637 emit_line_note (location_t location)
4639 rtx note;
4641 #ifdef USE_MAPPED_LOCATION
4642 if (location == last_location)
4643 return NULL_RTX;
4644 #else
4645 if (location.file && last_location.file
4646 && !strcmp (location.file, last_location.file)
4647 && location.line == last_location.line)
4648 return NULL_RTX;
4649 #endif
4650 last_location = location;
4652 if (no_line_numbers)
4654 cur_insn_uid++;
4655 return NULL_RTX;
4658 #ifdef USE_MAPPED_LOCATION
4659 note = emit_note ((int) location);
4660 #else
4661 note = emit_note (location.line);
4662 NOTE_SOURCE_FILE (note) = location.file;
4663 #endif
4665 return note;
4668 /* Emit a copy of note ORIG. */
4671 emit_note_copy (rtx orig)
4673 rtx note;
4675 if (NOTE_LINE_NUMBER (orig) >= 0 && no_line_numbers)
4677 cur_insn_uid++;
4678 return NULL_RTX;
4681 note = rtx_alloc (NOTE);
4683 INSN_UID (note) = cur_insn_uid++;
4684 NOTE_DATA (note) = NOTE_DATA (orig);
4685 NOTE_LINE_NUMBER (note) = NOTE_LINE_NUMBER (orig);
4686 BLOCK_FOR_INSN (note) = NULL;
4687 add_insn (note);
4689 return note;
4692 /* Make an insn of code NOTE with subtype NOTE_NO
4693 and add it to the end of the doubly-linked list. */
4696 emit_note (int note_no)
4698 rtx note;
4700 note = rtx_alloc (NOTE);
4701 INSN_UID (note) = cur_insn_uid++;
4702 NOTE_LINE_NUMBER (note) = note_no;
4703 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
4704 BLOCK_FOR_INSN (note) = NULL;
4705 add_insn (note);
4706 return note;
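/* Usage sketch (illustrative only): emitting a marker note at the end
   of the chain; NOTE_INSN_DELETED is one of the standard subtypes.  */
#if 0
  emit_note (NOTE_INSN_DELETED);
#endif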
4709 /* Cause next statement to emit a line note even if the line number
4710 has not changed. */
4712 void
4713 force_next_line_note (void)
4715 #ifdef USE_MAPPED_LOCATION
4716 last_location = -1;
4717 #else
4718 last_location.line = -1;
4719 #endif
4722 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
4723 note of this type already exists, remove it first. */
4726 set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
4728 rtx note = find_reg_note (insn, kind, NULL_RTX);
4730 switch (kind)
4732 case REG_EQUAL:
4733 case REG_EQUIV:
4734 /* Don't add REG_EQUAL/REG_EQUIV notes if the insn
4735 has multiple sets (some callers assume single_set
4736 means the insn only has one set, when in fact it
4737 means the insn only has one *useful* set). */
4738 if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
4740 if (note)
4741 abort ();
4742 return NULL_RTX;
4745 /* Don't add ASM_OPERANDS REG_EQUAL/REG_EQUIV notes.
4746 It serves no useful purpose and breaks eliminate_regs. */
4747 if (GET_CODE (datum) == ASM_OPERANDS)
4748 return NULL_RTX;
4749 break;
4751 default:
4752 break;
4755 if (note)
4757 XEXP (note, 0) = datum;
4758 return note;
4761 REG_NOTES (insn) = gen_rtx_EXPR_LIST (kind, datum, REG_NOTES (insn));
4762 return REG_NOTES (insn);
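/* Usage sketch (illustrative only): record that INSN's single set
   computes the constant 42, replacing any stale REG_EQUAL note in the
   process.  INSN is hypothetical.  */
#if 0
  set_unique_reg_note (insn, REG_EQUAL, GEN_INT (42));
#endif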
4765 /* Return an indication of which type of insn should have X as a body.
4766 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
4768 enum rtx_code
4769 classify_insn (rtx x)
4771 if (LABEL_P (x))
4772 return CODE_LABEL;
4773 if (GET_CODE (x) == CALL)
4774 return CALL_INSN;
4775 if (GET_CODE (x) == RETURN)
4776 return JUMP_INSN;
4777 if (GET_CODE (x) == SET)
4779 if (SET_DEST (x) == pc_rtx)
4780 return JUMP_INSN;
4781 else if (GET_CODE (SET_SRC (x)) == CALL)
4782 return CALL_INSN;
4783 else
4784 return INSN;
4786 if (GET_CODE (x) == PARALLEL)
4788 int j;
4789 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
4790 if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
4791 return CALL_INSN;
4792 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
4793 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
4794 return JUMP_INSN;
4795 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
4796 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
4797 return CALL_INSN;
4799 return INSN;
4802 /* Emit the rtl pattern X as an appropriate kind of insn.
4803 If X is a label, it is simply added into the insn chain. */
4806 emit (rtx x)
4808 enum rtx_code code = classify_insn (x);
4810 if (code == CODE_LABEL)
4811 return emit_label (x);
4812 else if (code == INSN)
4813 return emit_insn (x);
4814 else if (code == JUMP_INSN)
4816 rtx insn = emit_jump_insn (x);
4817 if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
4818 return emit_barrier ();
4819 return insn;
4821 else if (code == CALL_INSN)
4822 return emit_call_insn (x);
4823 else
4824 abort ();
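/* Usage sketch (illustrative only): a SET whose destination is pc_rtx
   classifies as a JUMP_INSN, so emit will also append a barrier when
   the resulting jump is unconditional.  LABEL is hypothetical.  */
#if 0
  emit (gen_rtx_SET (VOIDmode, pc_rtx, gen_rtx_LABEL_REF (VOIDmode, label)));
#endif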
4827 /* Space for free sequence stack entries. */
4828 static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
4830 /* Begin emitting insns to a sequence. If this sequence will contain
4831 something that might cause the compiler to pop arguments to function
4832 calls (because those pops have previously been deferred; see
4833 INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
4834 before calling this function. That will ensure that the deferred
4835 pops are not accidentally emitted in the middle of this sequence. */
4837 void
4838 start_sequence (void)
4840 struct sequence_stack *tem;
4842 if (free_sequence_stack != NULL)
4844 tem = free_sequence_stack;
4845 free_sequence_stack = tem->next;
4847 else
4848 tem = ggc_alloc (sizeof (struct sequence_stack));
4850 tem->next = seq_stack;
4851 tem->first = first_insn;
4852 tem->last = last_insn;
4854 seq_stack = tem;
4856 first_insn = 0;
4857 last_insn = 0;
4860 /* Set up the insn chain starting with FIRST as the current sequence,
4861 saving the previously current one. See the documentation for
4862 start_sequence for more information about how to use this function. */
4864 void
4865 push_to_sequence (rtx first)
4867 rtx last;
4869 start_sequence ();
4871 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last));
4873 first_insn = first;
4874 last_insn = last;
4877 /* Set up the insn chain from a chain starting at FIRST and ending at LAST. */
4879 void
4880 push_to_full_sequence (rtx first, rtx last)
4882 start_sequence ();
4883 first_insn = first;
4884 last_insn = last;
4885 /* We really should have the end of the insn chain here. */
4886 if (last && NEXT_INSN (last))
4887 abort ();
4890 /* Set up the outer-level insn chain
4891 as the current sequence, saving the previously current one. */
4893 void
4894 push_topmost_sequence (void)
4896 struct sequence_stack *stack, *top = NULL;
4898 start_sequence ();
4900 for (stack = seq_stack; stack; stack = stack->next)
4901 top = stack;
4903 first_insn = top->first;
4904 last_insn = top->last;
4907 /* After emitting to the outer-level insn chain, update the outer-level
4908 insn chain, and restore the previous saved state. */
4910 void
4911 pop_topmost_sequence (void)
4913 struct sequence_stack *stack, *top = NULL;
4915 for (stack = seq_stack; stack; stack = stack->next)
4916 top = stack;
4918 top->first = first_insn;
4919 top->last = last_insn;
4921 end_sequence ();
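/* Usage sketch (illustrative only): temporarily switch to the
   function's outermost insn chain, e.g. to emit one-time setup code
   from inside a nested sequence, then restore the nesting.  */
#if 0
  push_topmost_sequence ();
  emit_insn (gen_rtx_USE (VOIDmode, gen_reg_rtx (Pmode)));
  pop_topmost_sequence ();
#endif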
4924 /* After emitting to a sequence, restore previous saved state.
4926 To get the contents of the sequence just made, you must call
4927 `get_insns' *before* calling here.
4929 If the compiler might have deferred popping arguments while
4930 generating this sequence, and this sequence will not be immediately
4931 inserted into the instruction stream, use do_pending_stack_adjust
4932 before calling get_insns. That will ensure that the deferred
4933 pops are inserted into this sequence, and not into some random
4934 location in the instruction stream. See INHIBIT_DEFER_POP for more
4935 information about deferred popping of arguments. */
4937 void
4938 end_sequence (void)
4940 struct sequence_stack *tem = seq_stack;
4942 first_insn = tem->first;
4943 last_insn = tem->last;
4944 seq_stack = tem->next;
4946 memset (tem, 0, sizeof (*tem));
4947 tem->next = free_sequence_stack;
4948 free_sequence_stack = tem;
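/* Usage sketch (illustrative only): the canonical pattern for building
   insns on the side and splicing them in later.  BEFORE is
   hypothetical; note that get_insns must be called before end_sequence
   restores the previously current chain.  */
#if 0
  rtx seq;
  start_sequence ();
  emit_insn (gen_rtx_SET (VOIDmode, gen_reg_rtx (SImode), const0_rtx));
  seq = get_insns ();
  end_sequence ();
  emit_insn_before (seq, before);
#endif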
4951 /* Return 1 if currently emitting into a sequence. */
4954 in_sequence_p (void)
4956 return seq_stack != 0;
4959 /* Put the various virtual registers into REGNO_REG_RTX. */
4961 void
4962 init_virtual_regs (struct emit_status *es)
4964 rtx *ptr = es->x_regno_reg_rtx;
4965 ptr[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
4966 ptr[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
4967 ptr[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
4968 ptr[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
4969 ptr[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
4973 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
4974 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
4975 static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
4976 static int copy_insn_n_scratches;
4978 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
4979 copied an ASM_OPERANDS.
4980 In that case, it is the original input-operand vector. */
4981 static rtvec orig_asm_operands_vector;
4983 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
4984 copied an ASM_OPERANDS.
4985 In that case, it is the copied input-operand vector. */
4986 static rtvec copy_asm_operands_vector;
4988 /* Likewise for the constraints vector. */
4989 static rtvec orig_asm_constraints_vector;
4990 static rtvec copy_asm_constraints_vector;
4992 /* Recursively create a new copy of an rtx for copy_insn.
4993 This function differs from copy_rtx in that it handles SCRATCHes and
4994 ASM_OPERANDs properly.
4995 Normally, this function is not used directly; use copy_insn as front end.
4996 However, you could first copy an insn pattern with copy_insn and then use
4997 this function afterwards to properly copy any REG_NOTEs containing
4998 SCRATCHes. */
5001 copy_insn_1 (rtx orig)
5003 rtx copy;
5004 int i, j;
5005 RTX_CODE code;
5006 const char *format_ptr;
5008 code = GET_CODE (orig);
5010 switch (code)
5012 case REG:
5013 case CONST_INT:
5014 case CONST_DOUBLE:
5015 case CONST_VECTOR:
5016 case SYMBOL_REF:
5017 case CODE_LABEL:
5018 case PC:
5019 case CC0:
5020 return orig;
5021 case CLOBBER:
5022 if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER)
5023 return orig;
5024 break;
5026 case SCRATCH:
5027 for (i = 0; i < copy_insn_n_scratches; i++)
5028 if (copy_insn_scratch_in[i] == orig)
5029 return copy_insn_scratch_out[i];
5030 break;
5032 case CONST:
5033 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
5034 a LABEL_REF, it isn't sharable. */
5035 if (GET_CODE (XEXP (orig, 0)) == PLUS
5036 && GET_CODE (XEXP (XEXP (orig, 0), 0)) == SYMBOL_REF
5037 && GET_CODE (XEXP (XEXP (orig, 0), 1)) == CONST_INT)
5038 return orig;
5039 break;
5041 /* A MEM with a constant address is not sharable. The problem is that
5042 the constant address may need to be reloaded. If the mem is shared,
5043 then reloading one copy of this mem will cause all copies to appear
5044 to have been reloaded. */
5046 default:
5047 break;
5050 copy = rtx_alloc (code);
5052 /* Copy the various flags, and other information. We assume that
5053 all fields need copying, and then clear the fields that should
5054 not be copied. That is the sensible default behavior, and forces
5055 us to explicitly document why we are *not* copying a flag. */
5056 memcpy (copy, orig, RTX_HDR_SIZE);
5058 /* We do not copy the USED flag, which is used as a mark bit during
5059 walks over the RTL. */
5060 RTX_FLAG (copy, used) = 0;
5062 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
5063 if (INSN_P (orig))
5065 RTX_FLAG (copy, jump) = 0;
5066 RTX_FLAG (copy, call) = 0;
5067 RTX_FLAG (copy, frame_related) = 0;
5070 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
5072 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
5074 copy->u.fld[i] = orig->u.fld[i];
5075 switch (*format_ptr++)
5077 case 'e':
5078 if (XEXP (orig, i) != NULL)
5079 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
5080 break;
5082 case 'E':
5083 case 'V':
5084 if (XVEC (orig, i) == orig_asm_constraints_vector)
5085 XVEC (copy, i) = copy_asm_constraints_vector;
5086 else if (XVEC (orig, i) == orig_asm_operands_vector)
5087 XVEC (copy, i) = copy_asm_operands_vector;
5088 else if (XVEC (orig, i) != NULL)
5090 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
5091 for (j = 0; j < XVECLEN (copy, i); j++)
5092 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
5094 break;
5096 case 't':
5097 case 'w':
5098 case 'i':
5099 case 's':
5100 case 'S':
5101 case 'u':
5102 case '0':
5103 /* These are left unchanged. */
5104 break;
5106 default:
5107 abort ();
5111 if (code == SCRATCH)
5113 i = copy_insn_n_scratches++;
5114 if (i >= MAX_RECOG_OPERANDS)
5115 abort ();
5116 copy_insn_scratch_in[i] = orig;
5117 copy_insn_scratch_out[i] = copy;
5119 else if (code == ASM_OPERANDS)
5121 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
5122 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
5123 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
5124 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
5127 return copy;
5130 /* Create a new copy of an rtx.
5131 This function differs from copy_rtx in that it handles SCRATCHes and
5132 ASM_OPERANDs properly.
5133 INSN doesn't really have to be a full INSN; it could be just the
5134 pattern. */
5136 copy_insn (rtx insn)
5138 copy_insn_n_scratches = 0;
5139 orig_asm_operands_vector = 0;
5140 orig_asm_constraints_vector = 0;
5141 copy_asm_operands_vector = 0;
5142 copy_asm_constraints_vector = 0;
5143 return copy_insn_1 (insn);
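/* Usage sketch (illustrative only): duplicating an insn's pattern for
   re-emission, much as emit_copy_of_insn_after below does.  INSN and
   AFTER are hypothetical.  */
#if 0
  rtx new_insn = emit_insn_after (copy_insn (PATTERN (insn)), after);
#endif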
5146 /* Initialize data structures and variables in this file
5147 before generating rtl for each function. */
5149 void
5150 init_emit (void)
5152 struct function *f = cfun;
5154 f->emit = ggc_alloc (sizeof (struct emit_status));
5155 first_insn = NULL;
5156 last_insn = NULL;
5157 cur_insn_uid = 1;
5158 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
5159 last_location = UNKNOWN_LOCATION;
5160 first_label_num = label_num;
5161 last_label_num = 0;
5162 seq_stack = NULL;
5164 /* Init the tables that describe all the pseudo regs. */
5166 f->emit->regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
5168 f->emit->regno_pointer_align
5169 = ggc_alloc_cleared (f->emit->regno_pointer_align_length
5170 * sizeof (unsigned char));
5172 regno_reg_rtx
5173 = ggc_alloc (f->emit->regno_pointer_align_length * sizeof (rtx));
5175 /* Put copies of all the hard registers into regno_reg_rtx. */
5176 memcpy (regno_reg_rtx,
5177 static_regno_reg_rtx,
5178 FIRST_PSEUDO_REGISTER * sizeof (rtx));
5180 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
5181 init_virtual_regs (f->emit);
5183 /* Indicate that the virtual registers and stack locations are
5184 all pointers. */
5185 REG_POINTER (stack_pointer_rtx) = 1;
5186 REG_POINTER (frame_pointer_rtx) = 1;
5187 REG_POINTER (hard_frame_pointer_rtx) = 1;
5188 REG_POINTER (arg_pointer_rtx) = 1;
5190 REG_POINTER (virtual_incoming_args_rtx) = 1;
5191 REG_POINTER (virtual_stack_vars_rtx) = 1;
5192 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5193 REG_POINTER (virtual_outgoing_args_rtx) = 1;
5194 REG_POINTER (virtual_cfa_rtx) = 1;
5196 #ifdef STACK_BOUNDARY
5197 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5198 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5199 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5200 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5202 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5203 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5204 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5205 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5206 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
5207 #endif
5209 #ifdef INIT_EXPANDERS
5210 INIT_EXPANDERS;
5211 #endif
5214 /* Generate the constant vector 0 for mode MODE. */
5216 static rtx
5217 gen_const_vector_0 (enum machine_mode mode)
5219 rtx tem;
5220 rtvec v;
5221 int units, i;
5222 enum machine_mode inner;
5224 units = GET_MODE_NUNITS (mode);
5225 inner = GET_MODE_INNER (mode);
5227 v = rtvec_alloc (units);
5229 /* CONST0_RTX for the inner mode must have been set before this function is called. */
5230 if (!CONST0_RTX (inner))
5231 abort ();
5233 for (i = 0; i < units; ++i)
5234 RTVEC_ELT (v, i) = CONST0_RTX (inner);
5236 tem = gen_rtx_raw_CONST_VECTOR (mode, v);
5237 return tem;
5240 /* Generate a vector like gen_rtx_raw_CONST_VEC, but use the zero vector when
5241 all elements are zero. */
5243 gen_rtx_CONST_VECTOR (enum machine_mode mode, rtvec v)
5245 rtx inner_zero = CONST0_RTX (GET_MODE_INNER (mode));
5246 int i;
5248 for (i = GET_MODE_NUNITS (mode) - 1; i >= 0; i--)
5249 if (RTVEC_ELT (v, i) != inner_zero)
5250 return gen_rtx_raw_CONST_VECTOR (mode, v);
5251 return CONST0_RTX (mode);
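/* Usage sketch (illustrative only): an all-zero vector collapses to
   the shared CONST0_RTX of the mode instead of allocating a fresh
   CONST_VECTOR.  V4SImode is assumed to exist on the target.  */
#if 0
  rtvec v = rtvec_alloc (4);
  int i;
  for (i = 0; i < 4; i++)
    RTVEC_ELT (v, i) = const0_rtx;
  rtx zero = gen_rtx_CONST_VECTOR (V4SImode, v);  /* == CONST0_RTX (V4SImode) */
#endif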
5254 /* Create some permanent unique rtl objects shared between all functions.
5255 LINE_NUMBERS is nonzero if line numbers are to be generated. */
5257 void
5258 init_emit_once (int line_numbers)
5260 int i;
5261 enum machine_mode mode;
5262 enum machine_mode double_mode;
5264 /* We need reg_raw_mode, so initialize the modes now. */
5265 init_reg_modes_once ();
5267 /* Initialize the CONST_INT, CONST_DOUBLE, and memory attribute hash
5268 tables. */
5269 const_int_htab = htab_create_ggc (37, const_int_htab_hash,
5270 const_int_htab_eq, NULL);
5272 const_double_htab = htab_create_ggc (37, const_double_htab_hash,
5273 const_double_htab_eq, NULL);
5275 mem_attrs_htab = htab_create_ggc (37, mem_attrs_htab_hash,
5276 mem_attrs_htab_eq, NULL);
5277 reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash,
5278 reg_attrs_htab_eq, NULL);
5280 no_line_numbers = ! line_numbers;
5282 /* Compute the word and byte modes. */
5284 byte_mode = VOIDmode;
5285 word_mode = VOIDmode;
5286 double_mode = VOIDmode;
5288 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
5289 mode = GET_MODE_WIDER_MODE (mode))
5291 if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
5292 && byte_mode == VOIDmode)
5293 byte_mode = mode;
5295 if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
5296 && word_mode == VOIDmode)
5297 word_mode = mode;
5300 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
5301 mode = GET_MODE_WIDER_MODE (mode))
5303 if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
5304 && double_mode == VOIDmode)
5305 double_mode = mode;
5308 ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
5310 /* Assign register numbers to the globally defined register rtx.
5311 This must be done at runtime because the register number field
5312 is in a union and some compilers can't initialize unions. */
5314 pc_rtx = gen_rtx_PC (VOIDmode);
5315 cc0_rtx = gen_rtx_CC0 (VOIDmode);
5316 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
5317 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
5318 if (hard_frame_pointer_rtx == 0)
5319 hard_frame_pointer_rtx = gen_raw_REG (Pmode,
5320 HARD_FRAME_POINTER_REGNUM);
5321 if (arg_pointer_rtx == 0)
5322 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
5323 virtual_incoming_args_rtx =
5324 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
5325 virtual_stack_vars_rtx =
5326 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
5327 virtual_stack_dynamic_rtx =
5328 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
5329 virtual_outgoing_args_rtx =
5330 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
5331 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
5333 /* Initialize RTL for commonly used hard registers. These are
5334 copied into regno_reg_rtx as we begin to compile each function. */
5335 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5336 static_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
5338 #ifdef INIT_EXPANDERS
5339 /* This is to initialize {init|mark|free}_machine_status before the first
5340 call to push_function_context_to. This is needed by the Chill front
5341 end which calls push_function_context_to before the first call to
5342 init_function_start. */
5343 INIT_EXPANDERS;
5344 #endif
5346 /* Create the unique rtx's for certain rtx codes and operand values. */
5348 /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
5349 tries to use these variables. */
5350 for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
5351 const_int_rtx[i + MAX_SAVED_CONST_INT] =
5352 gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);
5354 if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
5355 && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
5356 const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
5357 else
5358 const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
5360 REAL_VALUE_FROM_INT (dconst0, 0, 0, double_mode);
5361 REAL_VALUE_FROM_INT (dconst1, 1, 0, double_mode);
5362 REAL_VALUE_FROM_INT (dconst2, 2, 0, double_mode);
5363 REAL_VALUE_FROM_INT (dconst3, 3, 0, double_mode);
5364 REAL_VALUE_FROM_INT (dconst10, 10, 0, double_mode);
5365 REAL_VALUE_FROM_INT (dconstm1, -1, -1, double_mode);
5366 REAL_VALUE_FROM_INT (dconstm2, -2, -1, double_mode);
5368 dconsthalf = dconst1;
5369 SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);
5371 real_arithmetic (&dconstthird, RDIV_EXPR, &dconst1, &dconst3);
5373 /* Initialize mathematical constants for constant folding builtins.
5374 These constants need to be given to at least 160 bits of precision. */
5375 real_from_string (&dconstpi,
5376 "3.1415926535897932384626433832795028841971693993751058209749445923078");
5377 real_from_string (&dconste,
5378 "2.7182818284590452353602874713526624977572470936999595749669676277241");
5380 for (i = 0; i < (int) ARRAY_SIZE (const_tiny_rtx); i++)
5382 REAL_VALUE_TYPE *r =
5383 (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
5385 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
5386 mode = GET_MODE_WIDER_MODE (mode))
5387 const_tiny_rtx[i][(int) mode] =
5388 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5390 const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
5392 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
5393 mode = GET_MODE_WIDER_MODE (mode))
5394 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5396 for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT);
5397 mode != VOIDmode;
5398 mode = GET_MODE_WIDER_MODE (mode))
5399 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5402 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
5403 mode != VOIDmode;
5404 mode = GET_MODE_WIDER_MODE (mode))
5405 const_tiny_rtx[0][(int) mode] = gen_const_vector_0 (mode);
5407 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
5408 mode != VOIDmode;
5409 mode = GET_MODE_WIDER_MODE (mode))
5410 const_tiny_rtx[0][(int) mode] = gen_const_vector_0 (mode);
5412 for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
5413 if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
5414 const_tiny_rtx[0][i] = const0_rtx;
5416 const_tiny_rtx[0][(int) BImode] = const0_rtx;
5417 if (STORE_FLAG_VALUE == 1)
5418 const_tiny_rtx[1][(int) BImode] = const1_rtx;
5420 #ifdef RETURN_ADDRESS_POINTER_REGNUM
5421 return_address_pointer_rtx
5422 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
5423 #endif
5425 #ifdef STATIC_CHAIN_REGNUM
5426 static_chain_rtx = gen_rtx_REG (Pmode, STATIC_CHAIN_REGNUM);
5428 #ifdef STATIC_CHAIN_INCOMING_REGNUM
5429 if (STATIC_CHAIN_INCOMING_REGNUM != STATIC_CHAIN_REGNUM)
5430 static_chain_incoming_rtx
5431 = gen_rtx_REG (Pmode, STATIC_CHAIN_INCOMING_REGNUM);
5432 else
5433 #endif
5434 static_chain_incoming_rtx = static_chain_rtx;
5435 #endif
5437 #ifdef STATIC_CHAIN
5438 static_chain_rtx = STATIC_CHAIN;
5440 #ifdef STATIC_CHAIN_INCOMING
5441 static_chain_incoming_rtx = STATIC_CHAIN_INCOMING;
5442 #else
5443 static_chain_incoming_rtx = static_chain_rtx;
5444 #endif
5445 #endif
5447 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
5448 pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
5451 /* Produce an exact duplicate of insn INSN after AFTER,
5452 taking care to update any libcall regions if present. */
5455 emit_copy_of_insn_after (rtx insn, rtx after)
5457 rtx new;
5458 rtx note1, note2, link;
5460 switch (GET_CODE (insn))
5462 case INSN:
5463 new = emit_insn_after (copy_insn (PATTERN (insn)), after);
5464 break;
5466 case JUMP_INSN:
5467 new = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
5468 break;
5470 case CALL_INSN:
5471 new = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
5472 if (CALL_INSN_FUNCTION_USAGE (insn))
5473 CALL_INSN_FUNCTION_USAGE (new)
5474 = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
5475 SIBLING_CALL_P (new) = SIBLING_CALL_P (insn);
5476 CONST_OR_PURE_CALL_P (new) = CONST_OR_PURE_CALL_P (insn);
5477 break;
5479 default:
5480 abort ();
5483 /* Update LABEL_NUSES. */
5484 mark_jump_label (PATTERN (new), new, 0);
5486 INSN_LOCATOR (new) = INSN_LOCATOR (insn);
5488 /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
5489 make them. */
5490 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
5491 if (REG_NOTE_KIND (link) != REG_LABEL)
5493 if (GET_CODE (link) == EXPR_LIST)
5494 REG_NOTES (new)
5495 = copy_insn_1 (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link),
5496 XEXP (link, 0),
5497 REG_NOTES (new)));
5498 else
5499 REG_NOTES (new)
5500 = copy_insn_1 (gen_rtx_INSN_LIST (REG_NOTE_KIND (link),
5501 XEXP (link, 0),
5502 REG_NOTES (new)));
5505 /* Fix the libcall sequences. */
5506 if ((note1 = find_reg_note (new, REG_RETVAL, NULL_RTX)) != NULL)
5508 rtx p = new;
5509 while ((note2 = find_reg_note (p, REG_LIBCALL, NULL_RTX)) == NULL)
5510 p = PREV_INSN (p);
5511 XEXP (note1, 0) = p;
5512 XEXP (note2, 0) = new;
5514 INSN_CODE (new) = INSN_CODE (insn);
5515 return new;
5518 static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
5520 gen_hard_reg_clobber (enum machine_mode mode, unsigned int regno)
5522 if (hard_reg_clobbers[mode][regno])
5523 return hard_reg_clobbers[mode][regno];
5524 else
5525 return (hard_reg_clobbers[mode][regno] =
5526 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
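/* Usage sketch (illustrative only): clobbers of hard registers are
   cached per (mode, regno), so repeated requests share one rtx.  The
   register number 0 is hypothetical.  */
#if 0
  emit_insn (gen_hard_reg_clobber (word_mode, 0));
#endif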
5529 #include "gt-emit-rtl.h"