/* Emit RTL for the GNU C-Compiler expander.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/* Middle-to-low level generation of rtx code and insns.

   This file contains the functions `gen_rtx', `gen_reg_rtx'
   and `gen_label_rtx' that are the usual ways of creating rtl
   expressions for most purposes.

   It also has the functions for creating insns and linking
   them in the doubly-linked chain.

   The patterns of the insns are created by machine-dependent
   routines in insn-emit.c, which is generated automatically from
   the machine description.  These routines use `gen_rtx' to make
   the individual rtx's of the pattern; what is machine dependent
   is the kind of rtx's they make and what arguments they use.  */
#include "config.h"
#include "system.h"
#include "toplev.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "hashtab.h"
#include "insn-config.h"
#include "recog.h"
#include "real.h"
#include "obstack.h"
#include "bitmap.h"
#include "basic-block.h"
#include "ggc.h"
#include "debug.h"
#include "langhooks.h"
/* Commonly used modes.  */

enum machine_mode byte_mode;	/* Mode whose width is BITS_PER_UNIT.  */
enum machine_mode word_mode;	/* Mode whose width is BITS_PER_WORD.  */
enum machine_mode double_mode;	/* Mode whose width is DOUBLE_TYPE_SIZE.  */
enum machine_mode ptr_mode;	/* Mode whose width is POINTER_SIZE.  */

/* This is *not* reset after each function.  It gives each CODE_LABEL
   in the entire compilation a unique label number.  */

static int label_num = 1;

/* Highest label number in current function.
   Zero means use the value of label_num instead.
   This is nonzero only when belatedly compiling an inline function.  */

static int last_label_num;

/* Value label_num had when set_new_first_and_last_label_number was called.
   If label_num has not changed since then, last_label_num is valid.  */

static int base_label_num;

/* Nonzero means do not generate NOTEs for source line numbers.  */

static int no_line_numbers;

/* Commonly used rtx's, so that we only need space for one copy.
   These are initialized once for the entire compilation.
   All of these are unique; no other rtx-object will be equal to any
   of these.  */

rtx global_rtl[GR_MAX];

/* We record floating-point CONST_DOUBLEs in each floating-point mode for
   the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
   record a copy of const[012]_rtx.  */

rtx const_tiny_rtx[3][(int) MAX_MACHINE_MODE];

rtx const_true_rtx;

REAL_VALUE_TYPE dconst0;
REAL_VALUE_TYPE dconst1;
REAL_VALUE_TYPE dconst2;
REAL_VALUE_TYPE dconstm1;

/* All references to the following fixed hard registers go through
   these unique rtl objects.  On machines where the frame-pointer and
   arg-pointer are the same register, they use the same unique object.

   After register allocation, other rtl objects which used to be pseudo-regs
   may be clobbered to refer to the frame-pointer register.
   But references that were originally to the frame-pointer can be
   distinguished from the others because they contain frame_pointer_rtx.

   When to use frame_pointer_rtx and hard_frame_pointer_rtx is a little
   tricky: until register elimination has taken place hard_frame_pointer_rtx
   should be used if it is being set, and frame_pointer_rtx otherwise.  After
   register elimination hard_frame_pointer_rtx should always be used.
   On machines where the two registers are the same (most machines), these
   are the same rtx.

   In an inline procedure, the stack and frame pointer rtxs may not be
   used for anything else.  */
rtx struct_value_rtx;		/* (REG:Pmode STRUCT_VALUE_REGNUM) */
rtx struct_value_incoming_rtx;	/* (REG:Pmode STRUCT_VALUE_INCOMING_REGNUM) */
rtx static_chain_rtx;		/* (REG:Pmode STATIC_CHAIN_REGNUM) */
rtx static_chain_incoming_rtx;	/* (REG:Pmode STATIC_CHAIN_INCOMING_REGNUM) */
rtx pic_offset_table_rtx;	/* (REG:Pmode PIC_OFFSET_TABLE_REGNUM) */

/* This is used to implement __builtin_return_address for some machines.
   See for instance the MIPS port.  */
rtx return_address_pointer_rtx;	/* (REG:Pmode RETURN_ADDRESS_POINTER_REGNUM) */

/* We make one copy of (const_int C) where C is in
   [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
   to save space during the compilation and simplify comparisons of
   integers.  */

rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];

/* A hash table storing CONST_INTs whose absolute value is greater
   than MAX_SAVED_CONST_INT.  */

static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_int_htab;

/* A hash table storing memory attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct mem_attrs)))
     htab_t mem_attrs_htab;

/* A hash table storing all CONST_DOUBLEs.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_double_htab;

#define first_insn (cfun->emit->x_first_insn)
#define last_insn (cfun->emit->x_last_insn)
#define cur_insn_uid (cfun->emit->x_cur_insn_uid)
#define last_linenum (cfun->emit->x_last_linenum)
#define last_filename (cfun->emit->x_last_filename)
#define first_label_num (cfun->emit->x_first_label_num)
static rtx make_jump_insn_raw		PARAMS ((rtx));
static rtx make_call_insn_raw		PARAMS ((rtx));
static rtx find_line_note		PARAMS ((rtx));
static rtx change_address_1		PARAMS ((rtx, enum machine_mode, rtx,
						 int));
static void unshare_all_rtl_1		PARAMS ((rtx));
static void unshare_all_decls		PARAMS ((tree));
static void reset_used_decls		PARAMS ((tree));
static void mark_label_nuses		PARAMS ((rtx));
static hashval_t const_int_htab_hash	PARAMS ((const void *));
static int const_int_htab_eq		PARAMS ((const void *,
						 const void *));
static hashval_t const_double_htab_hash PARAMS ((const void *));
static int const_double_htab_eq		PARAMS ((const void *,
						 const void *));
static rtx lookup_const_double		PARAMS ((rtx));
static hashval_t mem_attrs_htab_hash	PARAMS ((const void *));
static int mem_attrs_htab_eq		PARAMS ((const void *,
						 const void *));
static mem_attrs *get_mem_attrs		PARAMS ((HOST_WIDE_INT, tree, rtx,
						 rtx, unsigned int,
						 enum machine_mode));
static tree component_ref_for_mem_expr	PARAMS ((tree));
static rtx gen_const_vector_0		PARAMS ((enum machine_mode));

/* Probability of the conditional branch currently processed by try_split.
   Set to -1 otherwise.  */
int split_branch_probability = -1;
/* Returns a hash code for X (which is really a CONST_INT).  */

static hashval_t
const_int_htab_hash (x)
     const void *x;
{
  return (hashval_t) INTVAL ((struct rtx_def *) x);
}

/* Returns non-zero if the value represented by X (which is really a
   CONST_INT) is the same as that given by Y (which is really a
   HOST_WIDE_INT *).  */

static int
const_int_htab_eq (x, y)
     const void *x;
     const void *y;
{
  return (INTVAL ((rtx) x) == *((const HOST_WIDE_INT *) y));
}

/* Returns a hash code for X (which is really a CONST_DOUBLE).  */
static hashval_t
const_double_htab_hash (x)
     const void *x;
{
  hashval_t h = 0;
  size_t i;
  rtx value = (rtx) x;

  for (i = 0; i < sizeof (CONST_DOUBLE_FORMAT) - 1; i++)
    h ^= XWINT (value, i);
  return h;
}

/* Returns non-zero if the value represented by X (really a ...)
   is the same as that represented by Y (really a ...)  */
static int
const_double_htab_eq (x, y)
     const void *x;
     const void *y;
{
  rtx a = (rtx) x, b = (rtx) y;
  size_t i;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  for (i = 0; i < sizeof (CONST_DOUBLE_FORMAT) - 1; i++)
    if (XWINT (a, i) != XWINT (b, i))
      return 0;

  return 1;
}

/* Returns a hash code for X (which is really a mem_attrs *).  */

static hashval_t
mem_attrs_htab_hash (x)
     const void *x;
{
  mem_attrs *p = (mem_attrs *) x;

  return (p->alias ^ (p->align * 1000)
	  ^ ((p->offset ? INTVAL (p->offset) : 0) * 50000)
	  ^ ((p->size ? INTVAL (p->size) : 0) * 2500000)
	  ^ (size_t) p->expr);
}

/* Returns non-zero if the value represented by X (which is really a
   mem_attrs *) is the same as that given by Y (which is also really a
   mem_attrs *).  */

static int
mem_attrs_htab_eq (x, y)
     const void *x;
     const void *y;
{
  mem_attrs *p = (mem_attrs *) x;
  mem_attrs *q = (mem_attrs *) y;

  return (p->alias == q->alias && p->expr == q->expr && p->offset == q->offset
	  && p->size == q->size && p->align == q->align);
}

/* Allocate a new mem_attrs structure and insert it into the hash table if
   one identical to it is not already in the table.  We are doing this for
   MEM of mode MODE.  */

static mem_attrs *
get_mem_attrs (alias, expr, offset, size, align, mode)
     HOST_WIDE_INT alias;
     tree expr;
     rtx offset;
     rtx size;
     unsigned int align;
     enum machine_mode mode;
{
  mem_attrs attrs;
  void **slot;

  /* If everything is the default, we can just return zero.  */
  if (alias == 0 && expr == 0 && offset == 0
      && (size == 0
	  || (mode != BLKmode && GET_MODE_SIZE (mode) == INTVAL (size)))
      && (align == BITS_PER_UNIT
	  || (STRICT_ALIGNMENT
	      && mode != BLKmode && align == GET_MODE_ALIGNMENT (mode))))
    return 0;

  attrs.alias = alias;
  attrs.expr = expr;
  attrs.offset = offset;
  attrs.size = size;
  attrs.align = align;

  slot = htab_find_slot (mem_attrs_htab, &attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc (sizeof (mem_attrs));
      memcpy (*slot, &attrs, sizeof (mem_attrs));
    }

  return *slot;
}
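
/* Usage sketch (illustrative, simplified): because mem_attrs structures
   are hash-consed above, callers never free them and may compare the
   returned pointers directly.  Two MEMs built with identical attributes
   share one record:

     mem_attrs *a = get_mem_attrs (set, expr, off, size, align, mode);
     mem_attrs *b = get_mem_attrs (set, expr, off, size, align, mode);

   here a == b, and both are 0 when every attribute is the default.  */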
/* Generate a new REG rtx.  Make sure ORIGINAL_REGNO is set properly, and
   don't attempt to share with the various global pieces of rtl (such as
   frame_pointer_rtx).  */

rtx
gen_raw_REG (mode, regno)
     enum machine_mode mode;
     int regno;
{
  rtx x = gen_rtx_raw_REG (mode, regno);
  ORIGINAL_REGNO (x) = regno;
  return x;
}

/* There are some RTL codes that require special attention; the generation
   functions do the raw handling.  If you add to this list, modify
   special_rtx in gengenrtl.c as well.  */

rtx
gen_rtx_CONST_INT (mode, arg)
     enum machine_mode mode ATTRIBUTE_UNUSED;
     HOST_WIDE_INT arg;
{
  void **slot;

  if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
    return const_int_rtx[arg + MAX_SAVED_CONST_INT];

#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
  if (const_true_rtx && arg == STORE_FLAG_VALUE)
    return const_true_rtx;
#endif

  /* Look up the CONST_INT in the hash table.  */
  slot = htab_find_slot_with_hash (const_int_htab, &arg,
				   (hashval_t) arg, INSERT);
  if (*slot == 0)
    *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);

  return (rtx) *slot;
}
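
/* Usage sketch (illustrative, simplified): since CONST_INTs are shared,
   pointer comparison suffices to test for a particular constant:

     rtx c1 = GEN_INT (42);
     rtx c2 = gen_rtx_CONST_INT (VOIDmode, 42);

   c1 == c2 holds whether 42 comes from the small-constant array or from
   const_int_htab.  */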
/* Return a CONST_INT for the value C truncated to mode MODE.  */

rtx
gen_int_mode (c, mode)
     HOST_WIDE_INT c;
     enum machine_mode mode;
{
  return GEN_INT (trunc_int_for_mode (c, mode));
}
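
/* Usage sketch (illustrative, simplified): gen_int_mode sign-extends the
   constant from the width of MODE, which a bare GEN_INT does not do:

     rtx x = gen_int_mode (0xff, QImode);   becomes (const_int -1)
     rtx y = GEN_INT (0xff);                stays (const_int 255), which
                                            is not a valid QImode value.  */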
/* CONST_DOUBLEs might be created from pairs of integers, or from
   REAL_VALUE_TYPEs.  Also, their length is known only at run time,
   so we cannot use gen_rtx_raw_CONST_DOUBLE.  */

/* Determine whether REAL, a CONST_DOUBLE, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */

static rtx
lookup_const_double (real)
     rtx real;
{
  void **slot = htab_find_slot (const_double_htab, real, INSERT);
  if (*slot == 0)
    *slot = real;

  return (rtx) *slot;
}

/* Return a CONST_DOUBLE rtx for a floating-point value specified by
   VALUE in mode MODE.  */

rtx
const_double_from_real_value (value, mode)
     REAL_VALUE_TYPE value;
     enum machine_mode mode;
{
  rtx real = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (real, mode);

  memcpy (&CONST_DOUBLE_LOW (real), &value, sizeof (REAL_VALUE_TYPE));

  return lookup_const_double (real);
}

/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
   of ints: I0 is the low-order word and I1 is the high-order word.
   Do not use this routine for non-integer modes; convert to
   REAL_VALUE_TYPE and use CONST_DOUBLE_FROM_REAL_VALUE.  */

rtx
immed_double_const (i0, i1, mode)
     HOST_WIDE_INT i0, i1;
     enum machine_mode mode;
{
  rtx value;
  unsigned int i;

  if (mode != VOIDmode)
    {
      int width;
      if (GET_MODE_CLASS (mode) != MODE_INT
	  && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
	abort ();

      /* We clear out all bits that don't belong in MODE, unless they and
	 our sign bit are all one.  So we get either a reasonable negative
	 value or a reasonable unsigned value for this mode.  */
      width = GET_MODE_BITSIZE (mode);
      if (width < HOST_BITS_PER_WIDE_INT
	  && ((i0 & ((HOST_WIDE_INT) (-1) << (width - 1)))
	      != ((HOST_WIDE_INT) (-1) << (width - 1))))
	i0 &= ((HOST_WIDE_INT) 1 << width) - 1, i1 = 0;
      else if (width == HOST_BITS_PER_WIDE_INT
	       && ! (i1 == ~0 && i0 < 0))
	i1 = 0;
      else if (width > 2 * HOST_BITS_PER_WIDE_INT)
	/* We cannot represent this value as a constant.  */
	abort ();

      /* If this would be an entire word for the target, but is not for
	 the host, then sign-extend on the host so that the number will
	 look the same way on the host that it would on the target.

	 For example, when building a 64 bit alpha hosted 32 bit sparc
	 targeted compiler, then we want the 32 bit unsigned value -1 to be
	 represented as a 64 bit value -1, and not as 0x00000000ffffffff.
	 The latter confuses the sparc backend.  */

      if (width < HOST_BITS_PER_WIDE_INT
	  && (i0 & ((HOST_WIDE_INT) 1 << (width - 1))))
	i0 |= ((HOST_WIDE_INT) (-1) << width);

      /* If MODE fits within HOST_BITS_PER_WIDE_INT, always use a
	 CONST_INT.

	 ??? Strictly speaking, this is wrong if we create a CONST_INT for
	 a large unsigned constant with the size of MODE being
	 HOST_BITS_PER_WIDE_INT and later try to interpret that constant
	 in a wider mode.  In that case we will mis-interpret it as a
	 negative number.

	 Unfortunately, the only alternative is to make a CONST_DOUBLE for
	 any constant in any mode if it is an unsigned constant larger
	 than the maximum signed integer in an int on the host.  However,
	 doing this will break everyone that always expects to see a
	 CONST_INT for SImode and smaller.

	 We have always been making CONST_INTs in this case, so nothing
	 new is being broken.  */

      if (width <= HOST_BITS_PER_WIDE_INT)
	i1 = (i0 < 0) ? ~(HOST_WIDE_INT) 0 : 0;
    }

  /* If this integer fits in one word, return a CONST_INT.  */
  if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
    return GEN_INT (i0);

  /* We use VOIDmode for integers.  */
  value = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (value, VOIDmode);

  CONST_DOUBLE_LOW (value) = i0;
  CONST_DOUBLE_HIGH (value) = i1;

  for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
    XWINT (value, i) = 0;

  return lookup_const_double (value);
}
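
/* Usage sketch (illustrative, simplified; assumes a host with 32-bit
   HOST_WIDE_INT building a 64-bit DImode constant): small pairs collapse
   back to a shared CONST_INT, while genuinely double-word values become
   a VOIDmode CONST_DOUBLE:

     rtx a = immed_double_const (5, 0, DImode);   yields (const_int 5)
     rtx b = immed_double_const (0, 1, DImode);   yields (const_double 0 1),
                                                  the value 1 << 32.  */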
rtx
gen_rtx_REG (mode, regno)
     enum machine_mode mode;
     unsigned int regno;
{
  /* In case the MD file explicitly references the frame pointer, have
     all such references point to the same frame pointer.  This is
     used during frame pointer elimination to distinguish the explicit
     references to these registers from pseudos that happened to be
     assigned to them.

     If we have eliminated the frame pointer or arg pointer, we will
     be using it as a normal register, for example as a spill
     register.  In such cases, we might be accessing it in a mode that
     is not Pmode and therefore cannot use the pre-allocated rtx.

     Also don't do this when we are making new REGs in reload, since
     we don't want to get confused with the real pointers.  */

  if (mode == Pmode && !reload_in_progress)
    {
      if (regno == FRAME_POINTER_REGNUM)
	return frame_pointer_rtx;
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
      if (regno == HARD_FRAME_POINTER_REGNUM)
	return hard_frame_pointer_rtx;
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && HARD_FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
      if (regno == ARG_POINTER_REGNUM)
	return arg_pointer_rtx;
#endif
#ifdef RETURN_ADDRESS_POINTER_REGNUM
      if (regno == RETURN_ADDRESS_POINTER_REGNUM)
	return return_address_pointer_rtx;
#endif
      if (regno == PIC_OFFSET_TABLE_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	return pic_offset_table_rtx;
      if (regno == STACK_POINTER_REGNUM)
	return stack_pointer_rtx;
    }

  return gen_raw_REG (mode, regno);
}

rtx
gen_rtx_MEM (mode, addr)
     enum machine_mode mode;
     rtx addr;
{
  rtx rt = gen_rtx_raw_MEM (mode, addr);

  /* This field is not cleared by the mere allocation of the rtx, so
     we clear it here.  */
  MEM_ATTRS (rt) = 0;

  return rt;
}

rtx
gen_rtx_SUBREG (mode, reg, offset)
     enum machine_mode mode;
     rtx reg;
     int offset;
{
  /* This is the most common failure type.
     Catch it early so we can see who does it.  */
  if ((offset % GET_MODE_SIZE (mode)) != 0)
    abort ();

  /* This check isn't usable right now because combine will
     throw arbitrary crap like a CALL into a SUBREG in
     gen_lowpart_for_combine so we must just eat it.  */
#if 0
  /* Check for this too.  */
  if (offset >= GET_MODE_SIZE (GET_MODE (reg)))
    abort ();
#endif
  return gen_rtx_raw_SUBREG (mode, reg, offset);
}

/* Generate a SUBREG representing the least-significant part of REG if MODE
   is smaller than the mode of REG, otherwise a paradoxical SUBREG.  */

rtx
gen_lowpart_SUBREG (mode, reg)
     enum machine_mode mode;
     rtx reg;
{
  enum machine_mode inmode;

  inmode = GET_MODE (reg);
  if (inmode == VOIDmode)
    inmode = mode;
  return gen_rtx_SUBREG (mode, reg,
			 subreg_lowpart_offset (mode, inmode));
}
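
/* Usage sketch (illustrative, simplified): taking the low SImode part of
   a DImode pseudo; subreg_lowpart_offset supplies the byte offset, which
   depends on endianness:

     rtx lo = gen_lowpart_SUBREG (SImode, di_reg);

   giving (subreg:SI (reg:DI n) 0) on a little-endian target and
   (subreg:SI (reg:DI n) 4) on a big-endian one.  */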
/* rtx gen_rtx (code, mode, [element1, ..., elementn])
**
**	    This routine generates an RTX of the size specified by
**	<code>, which is an RTX code.  The RTX structure is initialized
**	from the arguments <element1> through <elementn>, which are
**	interpreted according to the specific RTX type's format.  The
**	special machine mode associated with the rtx (if any) is specified
**	in <mode>.
**
**	    gen_rtx can be invoked in a way which resembles the lisp-like
**	rtx it will generate.  For example, the following rtx structure:
**
**	      (plus:QI (mem:QI (reg:SI 1))
**		       (mem:QI (plus:SI (reg:SI 2) (reg:SI 3))))
**
**	    ...would be generated by the following C code:
**
**	    gen_rtx (PLUS, QImode,
**		gen_rtx (MEM, QImode,
**		    gen_rtx (REG, SImode, 1)),
**		gen_rtx (MEM, QImode,
**		    gen_rtx (PLUS, SImode,
**			gen_rtx (REG, SImode, 2),
**			gen_rtx (REG, SImode, 3))))
*/

/*VARARGS2*/
rtx
gen_rtx VPARAMS ((enum rtx_code code, enum machine_mode mode, ...))
{
  int i;		/* Array indices...			*/
  const char *fmt;	/* Current rtx's format...		*/
  rtx rt_val;		/* RTX to return to caller...		*/

  VA_OPEN (p, mode);
  VA_FIXEDARG (p, enum rtx_code, code);
  VA_FIXEDARG (p, enum machine_mode, mode);

  switch (code)
    {
    case CONST_INT:
      rt_val = gen_rtx_CONST_INT (mode, va_arg (p, HOST_WIDE_INT));
      break;

    case CONST_DOUBLE:
      {
	HOST_WIDE_INT arg0 = va_arg (p, HOST_WIDE_INT);
	HOST_WIDE_INT arg1 = va_arg (p, HOST_WIDE_INT);

	rt_val = immed_double_const (arg0, arg1, mode);
      }
      break;

    case REG:
      rt_val = gen_rtx_REG (mode, va_arg (p, int));
      break;

    case MEM:
      rt_val = gen_rtx_MEM (mode, va_arg (p, rtx));
      break;

    default:
      rt_val = rtx_alloc (code);	/* Allocate the storage space.  */
      rt_val->mode = mode;		/* Store the machine mode...  */

      fmt = GET_RTX_FORMAT (code);	/* Find the right format...  */
      for (i = 0; i < GET_RTX_LENGTH (code); i++)
	{
	  switch (*fmt++)
	    {
	    case '0':		/* Unused field.  */
	      break;

	    case 'i':		/* An integer?  */
	      XINT (rt_val, i) = va_arg (p, int);
	      break;

	    case 'w':		/* A wide integer?  */
	      XWINT (rt_val, i) = va_arg (p, HOST_WIDE_INT);
	      break;

	    case 's':		/* A string?  */
	      XSTR (rt_val, i) = va_arg (p, char *);
	      break;

	    case 'e':		/* An expression?  */
	    case 'u':		/* An insn?  Same except when printing.  */
	      XEXP (rt_val, i) = va_arg (p, rtx);
	      break;

	    case 'E':		/* An RTX vector?  */
	      XVEC (rt_val, i) = va_arg (p, rtvec);
	      break;

	    case 'b':		/* A bitmap?  */
	      XBITMAP (rt_val, i) = va_arg (p, bitmap);
	      break;

	    case 't':		/* A tree?  */
	      XTREE (rt_val, i) = va_arg (p, tree);
	      break;

	    default:
	      abort ();
	    }
	}
      break;
    }

  VA_CLOSE (p);
  return rt_val;
}
/* gen_rtvec (n, [rt1, ..., rtn])
**
**	    This routine creates an rtvec and stores within it the
**	pointers to rtx's which are its arguments.
*/

/*VARARGS1*/
rtvec
gen_rtvec VPARAMS ((int n, ...))
{
  int i, save_n;
  rtx *vector;

  VA_OPEN (p, n);
  VA_FIXEDARG (p, int, n);

  if (n == 0)
    return NULL_RTVEC;		/* Don't allocate an empty rtvec...  */

  vector = (rtx *) alloca (n * sizeof (rtx));

  for (i = 0; i < n; i++)
    vector[i] = va_arg (p, rtx);

  /* The definition of VA_* in K&R C causes `n' to go out of scope.  */
  save_n = n;
  VA_CLOSE (p);

  return gen_rtvec_v (save_n, vector);
}

rtvec
gen_rtvec_v (n, argp)
     int n;
     rtx *argp;
{
  int i;
  rtvec rt_val;

  if (n == 0)
    return NULL_RTVEC;		/* Don't allocate an empty rtvec...  */

  rt_val = rtvec_alloc (n);	/* Allocate an rtvec...  */

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}
/* Generate a REG rtx for a new pseudo register of mode MODE.
   This pseudo is assigned the next sequential register number.  */

rtx
gen_reg_rtx (mode)
     enum machine_mode mode;
{
  struct function *f = cfun;
  rtx val;

  /* Don't let anything called after initial flow analysis create new
     registers.  */
  if (no_new_pseudos)
    abort ();

  if (generating_concat_p
      && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
    {
      /* For complex modes, don't make a single pseudo.
	 Instead, make a CONCAT of two pseudos.
	 This allows noncontiguous allocation of the real and imaginary parts,
	 which makes much better code.  Besides, allocating DCmode
	 pseudos overstrains reload on some machines like the 386.  */
      rtx realpart, imagpart;
      int size = GET_MODE_UNIT_SIZE (mode);
      enum machine_mode partmode
	= mode_for_size (size * BITS_PER_UNIT,
			 (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
			  ? MODE_FLOAT : MODE_INT),
			 0);

      realpart = gen_reg_rtx (partmode);
      imagpart = gen_reg_rtx (partmode);
      return gen_rtx_CONCAT (mode, realpart, imagpart);
    }

  /* Make sure regno_pointer_align, regno_decl, and regno_reg_rtx are large
     enough to have an element for this pseudo reg number.  */

  if (reg_rtx_no == f->emit->regno_pointer_align_length)
    {
      int old_size = f->emit->regno_pointer_align_length;
      char *new;
      rtx *new1;
      tree *new2;

      new = ggc_realloc (f->emit->regno_pointer_align, old_size * 2);
      memset (new + old_size, 0, old_size);
      f->emit->regno_pointer_align = (unsigned char *) new;

      new1 = (rtx *) ggc_realloc (f->emit->x_regno_reg_rtx,
				  old_size * 2 * sizeof (rtx));
      memset (new1 + old_size, 0, old_size * sizeof (rtx));
      regno_reg_rtx = new1;

      new2 = (tree *) ggc_realloc (f->emit->regno_decl,
				   old_size * 2 * sizeof (tree));
      memset (new2 + old_size, 0, old_size * sizeof (tree));
      f->emit->regno_decl = new2;

      f->emit->regno_pointer_align_length = old_size * 2;
    }

  val = gen_raw_REG (mode, reg_rtx_no);
  regno_reg_rtx[reg_rtx_no++] = val;
  return val;
}
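
/* Usage sketch (illustrative, simplified): a pass needing a scratch value
   just asks for a fresh pseudo; complex modes come back as a CONCAT of
   two independently allocatable pseudos:

     rtx tmp = gen_reg_rtx (SImode);   (reg:SI n) with a new regno n
     rtx c   = gen_reg_rtx (SCmode);   (concat:SC (reg:SF a) (reg:SF b))  */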
/* Identify REG (which may be a CONCAT) as a user register.  */

void
mark_user_reg (reg)
     rtx reg;
{
  if (GET_CODE (reg) == CONCAT)
    {
      REG_USERVAR_P (XEXP (reg, 0)) = 1;
      REG_USERVAR_P (XEXP (reg, 1)) = 1;
    }
  else if (GET_CODE (reg) == REG)
    REG_USERVAR_P (reg) = 1;
  else
    abort ();
}

/* Identify REG as a probable pointer register and show its alignment
   as ALIGN, if nonzero.  */

void
mark_reg_pointer (reg, align)
     rtx reg;
     int align;
{
  if (! REG_POINTER (reg))
    {
      REG_POINTER (reg) = 1;

      if (align)
	REGNO_POINTER_ALIGN (REGNO (reg)) = align;
    }
  else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
    /* We can no longer be sure just how aligned this pointer is.  */
    REGNO_POINTER_ALIGN (REGNO (reg)) = align;
}
/* Return 1 plus largest pseudo reg number used in the current function.  */

int
max_reg_num ()
{
  return reg_rtx_no;
}

/* Return 1 + the largest label number used so far in the current function.  */

int
max_label_num ()
{
  if (last_label_num && label_num == base_label_num)
    return last_label_num;
  return label_num;
}

/* Return first label number used in this function (if any were used).  */

int
get_first_label_num ()
{
  return first_label_num;
}

/* Return the final regno of X, which is a SUBREG of a hard
   register.  */

unsigned int
subreg_hard_regno (x, check_mode)
     rtx x;
     int check_mode;
{
  enum machine_mode mode = GET_MODE (x);
  unsigned int byte_offset, base_regno, final_regno;
  rtx reg = SUBREG_REG (x);

  /* This is where we attempt to catch illegal subregs
     created by the compiler.  */
  if (GET_CODE (x) != SUBREG
      || GET_CODE (reg) != REG)
    abort ();
  base_regno = REGNO (reg);
  if (base_regno >= FIRST_PSEUDO_REGISTER)
    abort ();
  if (check_mode && ! HARD_REGNO_MODE_OK (base_regno, GET_MODE (reg)))
    abort ();

  /* Catch non-congruent offsets too.  */
  byte_offset = SUBREG_BYTE (x);
  if ((byte_offset % GET_MODE_SIZE (mode)) != 0)
    abort ();

  final_regno = subreg_regno (x);

  return final_regno;
}
/* Return a value representing some low-order bits of X, where the number
   of low-order bits is given by MODE.  Note that no conversion is done
   between floating-point and fixed-point values, rather, the bit
   representation is returned.

   This function handles the cases in common between gen_lowpart, below,
   and two variants in cse.c and combine.c.  These are the cases that can
   be safely handled at all points in the compilation.

   If this is not a case we can handle, return 0.  */

rtx
gen_lowpart_common (mode, x)
     enum machine_mode mode;
     rtx x;
{
  int msize = GET_MODE_SIZE (mode);
  int xsize = GET_MODE_SIZE (GET_MODE (x));
  int offset = 0;

  if (GET_MODE (x) == mode)
    return x;

  /* MODE must occupy no more words than the mode of X.  */
  if (GET_MODE (x) != VOIDmode
      && ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
	  > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
    return 0;

  offset = subreg_lowpart_offset (mode, GET_MODE (x));

  if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
      && (GET_MODE_CLASS (mode) == MODE_INT
	  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
    {
      /* If we are getting the low-order part of something that has been
	 sign- or zero-extended, we can either just use the object being
	 extended or make a narrower extension.  If we want an even smaller
	 piece than the size of the object being extended, call ourselves
	 recursively.

	 This case is used mostly by combine and cse.  */

      if (GET_MODE (XEXP (x, 0)) == mode)
	return XEXP (x, 0);
      else if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
	return gen_lowpart_common (mode, XEXP (x, 0));
      else if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (x)))
	return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
    }
  else if (GET_CODE (x) == SUBREG || GET_CODE (x) == REG
	   || GET_CODE (x) == CONCAT)
    return simplify_gen_subreg (mode, x, GET_MODE (x), offset);
  /* If X is a CONST_INT or a CONST_DOUBLE, extract the appropriate bits
     from the low-order part of the constant.  */
  else if ((GET_MODE_CLASS (mode) == MODE_INT
	    || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
	   && GET_MODE (x) == VOIDmode
	   && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
    {
      /* If MODE is twice the host word size, X is already the desired
	 representation.  Otherwise, if MODE is wider than a word, we can't
	 do this.  If MODE is exactly a word, return just one CONST_INT.  */

      if (GET_MODE_BITSIZE (mode) >= 2 * HOST_BITS_PER_WIDE_INT)
	return x;
      else if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
	return 0;
      else if (GET_MODE_BITSIZE (mode) == HOST_BITS_PER_WIDE_INT)
	return (GET_CODE (x) == CONST_INT ? x
		: GEN_INT (CONST_DOUBLE_LOW (x)));
      else
	{
	  /* MODE must be narrower than HOST_BITS_PER_WIDE_INT.  */
	  HOST_WIDE_INT val = (GET_CODE (x) == CONST_INT ? INTVAL (x)
			       : CONST_DOUBLE_LOW (x));

	  /* Sign extend to HOST_WIDE_INT.  */
	  val = trunc_int_for_mode (val, mode);

	  return (GET_CODE (x) == CONST_INT && INTVAL (x) == val ? x
		  : GEN_INT (val));
	}
    }

  /* The floating-point emulator can handle all conversions between
     FP and integer operands.  This simplifies reload because it
     doesn't have to deal with constructs like (subreg:DI
     (const_double:SF ...)) or (subreg:DF (const_int ...)).  */
  /* Single-precision floats are always 32-bits and double-precision
     floats are always 64-bits.  */

  else if (GET_MODE_CLASS (mode) == MODE_FLOAT
	   && GET_MODE_BITSIZE (mode) == 32
	   && GET_CODE (x) == CONST_INT)
    {
      REAL_VALUE_TYPE r;
      HOST_WIDE_INT i;

      i = INTVAL (x);
      r = REAL_VALUE_FROM_TARGET_SINGLE (i);
      return CONST_DOUBLE_FROM_REAL_VALUE (r, mode);
    }
  else if (GET_MODE_CLASS (mode) == MODE_FLOAT
	   && GET_MODE_BITSIZE (mode) == 64
	   && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE)
	   && GET_MODE (x) == VOIDmode)
    {
      REAL_VALUE_TYPE r;
      HOST_WIDE_INT i[2];
      HOST_WIDE_INT low, high;

      if (GET_CODE (x) == CONST_INT)
	{
	  low = INTVAL (x);
	  high = low >> (HOST_BITS_PER_WIDE_INT - 1);
	}
      else
	{
	  low = CONST_DOUBLE_LOW (x);
	  high = CONST_DOUBLE_HIGH (x);
	}

#if HOST_BITS_PER_WIDE_INT == 32
      /* REAL_VALUE_TARGET_DOUBLE takes the addressing order of the
	 target machine.  */
      if (WORDS_BIG_ENDIAN)
	i[0] = high, i[1] = low;
      else
	i[0] = low, i[1] = high;
#else
      i[0] = low;
#endif

      r = REAL_VALUE_FROM_TARGET_DOUBLE (i);
      return CONST_DOUBLE_FROM_REAL_VALUE (r, mode);
    }
  else if ((GET_MODE_CLASS (mode) == MODE_INT
	    || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
	   && GET_CODE (x) == CONST_DOUBLE
	   && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
    {
      REAL_VALUE_TYPE r;
      long i[4];  /* Only the low 32 bits of each 'long' are used.  */
      int endian = WORDS_BIG_ENDIAN ? 1 : 0;

      /* Convert 'r' into an array of four 32-bit words in target word
	 order.  */
      REAL_VALUE_FROM_CONST_DOUBLE (r, x);
      switch (GET_MODE_BITSIZE (GET_MODE (x)))
	{
	case 32:
	  REAL_VALUE_TO_TARGET_SINGLE (r, i[3 * endian]);
	  i[1] = 0;
	  i[2] = 0;
	  i[3 - 3 * endian] = 0;
	  break;
	case 64:
	  REAL_VALUE_TO_TARGET_DOUBLE (r, i + 2 * endian);
	  i[2 - 2 * endian] = 0;
	  i[3 - 2 * endian] = 0;
	  break;
	case 96:
	  REAL_VALUE_TO_TARGET_LONG_DOUBLE (r, i + endian);
	  i[3 - 3 * endian] = 0;
	  break;
	case 128:
	  REAL_VALUE_TO_TARGET_LONG_DOUBLE (r, i);
	  break;
	default:
	  abort ();
	}

      /* Now, pack the 32-bit elements of the array into a CONST_DOUBLE
	 and return it.  */
#if HOST_BITS_PER_WIDE_INT == 32
      return immed_double_const (i[3 * endian], i[1 + endian], mode);
#else
      if (HOST_BITS_PER_WIDE_INT != 64)
	abort ();

      return immed_double_const ((((unsigned long) i[3 * endian])
				  | ((HOST_WIDE_INT) i[1 + endian] << 32)),
				 (((unsigned long) i[2 - endian])
				  | ((HOST_WIDE_INT) i[3 - 3 * endian] << 32)),
				 mode);
#endif
    }

  /* Otherwise, we can't do this.  */
  return 0;
}
/* Return the real part (which has mode MODE) of a complex value X.
   This always comes at the low address in memory.  */

rtx
gen_realpart (mode, x)
     enum machine_mode mode;
     rtx x;
{
  if (WORDS_BIG_ENDIAN
      && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
      && REG_P (x)
      && REGNO (x) < FIRST_PSEUDO_REGISTER)
    internal_error
      ("can't access real part of complex value in hard register");
  else if (WORDS_BIG_ENDIAN)
    return gen_highpart (mode, x);
  else
    return gen_lowpart (mode, x);
}

/* Return the imaginary part (which has mode MODE) of a complex value X.
   This always comes at the high address in memory.  */

rtx
gen_imagpart (mode, x)
     enum machine_mode mode;
     rtx x;
{
  if (WORDS_BIG_ENDIAN)
    return gen_lowpart (mode, x);
  else if (! WORDS_BIG_ENDIAN
	   && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
	   && REG_P (x)
	   && REGNO (x) < FIRST_PSEUDO_REGISTER)
    internal_error
      ("can't access imaginary part of complex value in hard register");
  else
    return gen_highpart (mode, x);
}

/* Return 1 iff X, assumed to be a SUBREG,
   refers to the real part of the complex value in its containing reg.
   Complex values are always stored with the real part in the first word,
   regardless of WORDS_BIG_ENDIAN.  */

int
subreg_realpart_p (x)
     rtx x;
{
  if (GET_CODE (x) != SUBREG)
    abort ();

  return ((unsigned int) SUBREG_BYTE (x)
	  < GET_MODE_UNIT_SIZE (GET_MODE (SUBREG_REG (x))));
}
/* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a value,
   return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
   least-significant part of X.
   MODE specifies how big a part of X to return;
   it usually should not be larger than a word.
   If X is a MEM whose address is a QUEUED, the value may be so also.  */

rtx
gen_lowpart (mode, x)
     enum machine_mode mode;
     rtx x;
{
  rtx result = gen_lowpart_common (mode, x);

  if (result)
    return result;
  else if (GET_CODE (x) == REG)
    {
      /* Must be a hard reg that's not valid in MODE.  */
      result = gen_lowpart_common (mode, copy_to_reg (x));
      if (result == 0)
	abort ();
      return result;
    }
  else if (GET_CODE (x) == MEM)
    {
      /* The only additional case we can do is MEM.  */
      int offset = 0;
      if (WORDS_BIG_ENDIAN)
	offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
		  - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));

      if (BYTES_BIG_ENDIAN)
	/* Adjust the address so that the address-after-the-data
	   is unchanged.  */
	offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
		   - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));

      return adjust_address (x, mode, offset);
    }
  else if (GET_CODE (x) == ADDRESSOF)
    return gen_lowpart (mode, force_reg (GET_MODE (x), x));
  else
    abort ();
}
/* Like `gen_lowpart', but refer to the most significant part.
   This is used to access the imaginary part of a complex number.  */

rtx
gen_highpart (mode, x)
     enum machine_mode mode;
     rtx x;
{
  unsigned int msize = GET_MODE_SIZE (mode);
  rtx result;

  /* This case loses if X is a subreg.  To catch bugs early,
     complain if an invalid MODE is used even in other cases.  */
  if (msize > UNITS_PER_WORD
      && msize != GET_MODE_UNIT_SIZE (GET_MODE (x)))
    abort ();

  result = simplify_gen_subreg (mode, x, GET_MODE (x),
				subreg_highpart_offset (mode, GET_MODE (x)));

  /* simplify_gen_subreg is not guaranteed to return a valid operand for
     the target if we have a MEM.  gen_highpart must return a valid operand,
     emitting code if necessary to do so.  */
  if (result != NULL_RTX && GET_CODE (result) == MEM)
    result = validize_mem (result);

  if (!result)
    abort ();
  return result;
}

/* Like gen_highpart, but accept the mode of EXP as INNERMODE in case EXP
   can be a VOIDmode constant.  */
rtx
gen_highpart_mode (outermode, innermode, exp)
     enum machine_mode outermode, innermode;
     rtx exp;
{
  if (GET_MODE (exp) != VOIDmode)
    {
      if (GET_MODE (exp) != innermode)
	abort ();
      return gen_highpart (outermode, exp);
    }
  return simplify_gen_subreg (outermode, exp, innermode,
			      subreg_highpart_offset (outermode, innermode));
}
/* Return offset in bytes to get OUTERMODE low part
   of the value in mode INNERMODE stored in memory in target format.  */

unsigned int
subreg_lowpart_offset (outermode, innermode)
     enum machine_mode outermode, innermode;
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  return offset;
}
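
/* Worked example (illustrative): for OUTERMODE = SImode (4 bytes) within
   INNERMODE = DImode (8 bytes), difference = 4.  On a target with 4-byte
   words, the low part is at byte offset 0 when little-endian and at byte
   offset 4 when words and bytes are big-endian: the low-order word of a
   DImode value is its second memory word on a big-endian machine.  */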
/* Return offset in bytes to get OUTERMODE high part
   of the value in mode INNERMODE stored in memory in target format.  */
unsigned int
subreg_highpart_offset (outermode, innermode)
     enum machine_mode outermode, innermode;
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  if (GET_MODE_SIZE (innermode) < GET_MODE_SIZE (outermode))
    abort ();

  if (difference > 0)
    {
      if (! WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (! BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  return offset;
}

/* Return 1 iff X, assumed to be a SUBREG,
   refers to the least significant part of its containing reg.
   If X is not a SUBREG, always return 1 (it is its own low part!).  */

int
subreg_lowpart_p (x)
     rtx x;
{
  if (GET_CODE (x) != SUBREG)
    return 1;
  else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
    return 0;

  return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
	  == SUBREG_BYTE (x));
}
/* Helper routine for all the constant cases of operand_subword.
   Some places invoke this directly.  */

rtx
constant_subword (op, offset, mode)
     rtx op;
     int offset;
     enum machine_mode mode;
{
  int size_ratio = HOST_BITS_PER_WIDE_INT / BITS_PER_WORD;
  HOST_WIDE_INT val;

  /* If OP is already an integer word, return it.  */
  if (GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_SIZE (mode) == UNITS_PER_WORD)
    return op;

  /* The output is some bits, the width of the target machine's word.
     A wider-word host can surely hold them in a CONST_INT.  A narrower-word
     host can't.  */
  if (HOST_BITS_PER_WIDE_INT >= BITS_PER_WORD
      && GET_MODE_CLASS (mode) == MODE_FLOAT
      && GET_MODE_BITSIZE (mode) == 64
      && GET_CODE (op) == CONST_DOUBLE)
    {
      long k[2];
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_DOUBLE (rv, k);

      /* We handle 32-bit and >= 64-bit words here.  Note that the order in
	 which the words are written depends on the word endianness.
	 ??? This is a potential portability problem and should
	 be fixed at some point.

	 We must exercise caution with the sign bit.  By definition there
	 are 32 significant bits in K; there may be more in a HOST_WIDE_INT.
	 Consider a host with a 32-bit long and a 64-bit HOST_WIDE_INT.
	 So we explicitly mask and sign-extend as necessary.  */
      if (BITS_PER_WORD == 32)
	{
	  val = k[offset];
	  val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;
	  return GEN_INT (val);
	}
#if HOST_BITS_PER_WIDE_INT >= 64
      else if (BITS_PER_WORD >= 64 && offset == 0)
	{
	  val = k[! WORDS_BIG_ENDIAN];
	  val = (((val & 0xffffffff) ^ 0x80000000) - 0x80000000) << 32;
	  val |= (HOST_WIDE_INT) k[WORDS_BIG_ENDIAN] & 0xffffffff;
	  return GEN_INT (val);
	}
#endif
      else if (BITS_PER_WORD == 16)
	{
	  val = k[offset >> 1];
	  if ((offset & 1) == ! WORDS_BIG_ENDIAN)
	    val >>= 16;
	  val = ((val & 0xffff) ^ 0x8000) - 0x8000;
	  return GEN_INT (val);
	}
      else
	abort ();
    }
  else if (HOST_BITS_PER_WIDE_INT >= BITS_PER_WORD
	   && GET_MODE_CLASS (mode) == MODE_FLOAT
	   && GET_MODE_BITSIZE (mode) > 64
	   && GET_CODE (op) == CONST_DOUBLE)
    {
      long k[4];
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);

      if (BITS_PER_WORD == 32)
	{
	  val = k[offset];
	  val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;
	  return GEN_INT (val);
	}
#if HOST_BITS_PER_WIDE_INT >= 64
      else if (BITS_PER_WORD >= 64 && offset <= 1)
	{
	  val = k[offset * 2 + ! WORDS_BIG_ENDIAN];
	  val = (((val & 0xffffffff) ^ 0x80000000) - 0x80000000) << 32;
	  val |= (HOST_WIDE_INT) k[offset * 2 + WORDS_BIG_ENDIAN] & 0xffffffff;
	  return GEN_INT (val);
	}
#endif
      else
	abort ();
    }

  /* Single word float is a little harder, since single- and double-word
     values often do not have the same high-order bits.  We have already
     verified that we want the only defined word of the single-word value.  */
  if (GET_MODE_CLASS (mode) == MODE_FLOAT
      && GET_MODE_BITSIZE (mode) == 32
      && GET_CODE (op) == CONST_DOUBLE)
    {
      long l;
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_SINGLE (rv, l);

      /* Sign extend from known 32-bit value to HOST_WIDE_INT.  */
      val = l;
      val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;

      if (BITS_PER_WORD == 16)
	{
	  if ((offset & 1) == ! WORDS_BIG_ENDIAN)
	    val >>= 16;
	  val = ((val & 0xffff) ^ 0x8000) - 0x8000;
	}

      return GEN_INT (val);
    }

  /* The only remaining cases that we can handle are integers.
     Convert to proper endianness now since these cases need it.
     At this point, offset == 0 means the low-order word.

     We do not want to handle the case when BITS_PER_WORD <= HOST_BITS_PER_INT
     in general.  However, if OP is (const_int 0), we can just return
     it for any word.  */

  if (op == const0_rtx)
    return op;

  if (GET_MODE_CLASS (mode) != MODE_INT
      || (GET_CODE (op) != CONST_INT && GET_CODE (op) != CONST_DOUBLE)
      || BITS_PER_WORD > HOST_BITS_PER_WIDE_INT)
    return 0;

  if (WORDS_BIG_ENDIAN)
    offset = GET_MODE_SIZE (mode) / UNITS_PER_WORD - 1 - offset;

  /* Find out which word on the host machine this value is in and get
     it from the constant.  */
  val = (offset / size_ratio == 0
	 ? (GET_CODE (op) == CONST_INT ? INTVAL (op) : CONST_DOUBLE_LOW (op))
	 : (GET_CODE (op) == CONST_INT
	    ? (INTVAL (op) < 0 ? ~0 : 0) : CONST_DOUBLE_HIGH (op)));

  /* Get the value we want into the low bits of val.  */
  if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT)
    val = ((val >> ((offset % size_ratio) * BITS_PER_WORD)));

  val = trunc_int_for_mode (val, word_mode);

  return GEN_INT (val);
}
/* Return subword OFFSET of operand OP.
   The word number, OFFSET, is interpreted as the word number starting
   at the low-order address.  OFFSET 0 is the low-order word if not
   WORDS_BIG_ENDIAN, otherwise it is the high-order word.

   If we cannot extract the required word, we return zero.  Otherwise,
   an rtx corresponding to the requested word will be returned.

   VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
   reload has completed, a valid address will always be returned.  After
   reload, if a valid address cannot be returned, we return zero.

   If VALIDATE_ADDRESS is zero, we simply form the required address; validating
   it is the responsibility of the caller.

   MODE is the mode of OP in case it is a CONST_INT.

   ??? This is still rather broken for some cases.  The problem for the
   moment is that all callers of this thing provide no 'goal mode' to
   tell us to work with.  This exists because all callers were written
   in a word based SUBREG world.
   Now use of this function can be deprecated by simplify_subreg in most
   cases.  */

rtx
operand_subword (op, offset, validate_address, mode)
     rtx op;
     unsigned int offset;
     int validate_address;
     enum machine_mode mode;
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  if (mode == VOIDmode)
    abort ();

  /* If OP is narrower than a word, fail.  */
  if (mode != BLKmode
      && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
    return 0;

  /* If we want a word outside OP, return zero.  */
  if (mode != BLKmode
      && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
    return const0_rtx;

  /* Form a new MEM at the requested address.  */
  if (GET_CODE (op) == MEM)
    {
      rtx new = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);

      if (! validate_address)
	return new;

      else if (reload_completed)
	{
	  if (! strict_memory_address_p (word_mode, XEXP (new, 0)))
	    return 0;
	}
      else
	return replace_equiv_address (new, XEXP (new, 0));
    }

  /* Rest can be handled by simplify_subreg.  */
  return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
}
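
/* Usage sketch (illustrative, simplified; assumes a target with 32-bit
   words): expanding a DImode move one word at a time:

     rtx w0 = operand_subword (op, 0, 1, DImode);
     rtx w1 = operand_subword (op, 1, 1, DImode);

   each result is a word_mode REG, MEM, or constant, or 0 when the word
   cannot be extracted; operand_subword_force below never returns 0.  */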
/* Similar to `operand_subword', but never return 0.  If we can't extract
   the required subword, put OP into a register and try again.  If that fails,
   abort.  We always validate the address in this case.

   MODE is the mode of OP, in case it is CONST_INT.  */

rtx
operand_subword_force (op, offset, mode)
     rtx op;
     unsigned int offset;
     enum machine_mode mode;
{
  rtx result = operand_subword (op, offset, 1, mode);

  if (result)
    return result;

  if (mode != BLKmode && mode != VOIDmode)
    {
      /* If this is a register which can not be accessed by words, copy it
	 to a pseudo register.  */
      if (GET_CODE (op) == REG)
	op = copy_to_reg (op);
      else
	op = force_reg (mode, op);
    }

  result = operand_subword (op, offset, 1, mode);
  if (result == 0)
    abort ();

  return result;
}
/* Given a compare instruction, swap the operands.
   A test instruction is changed into a compare of 0 against the operand.  */

void
reverse_comparison (insn)
     rtx insn;
{
  rtx body = PATTERN (insn);
  rtx comp;

  if (GET_CODE (body) == SET)
    comp = SET_SRC (body);
  else
    comp = SET_SRC (XVECEXP (body, 0, 0));

  if (GET_CODE (comp) == COMPARE)
    {
      rtx op0 = XEXP (comp, 0);
      rtx op1 = XEXP (comp, 1);
      XEXP (comp, 0) = op1;
      XEXP (comp, 1) = op0;
    }
  else
    {
      rtx new = gen_rtx_COMPARE (VOIDmode,
				 CONST0_RTX (GET_MODE (comp)), comp);
      if (GET_CODE (body) == SET)
	SET_SRC (body) = new;
      else
	SET_SRC (XVECEXP (body, 0, 0)) = new;
    }
}
/* Within a MEM_EXPR, we care about either (1) a component ref of a decl,
   or (2) a component ref of something variable.  Represent the latter with
   a NULL expression.  */

static tree
component_ref_for_mem_expr (ref)
     tree ref;
{
  tree inner = TREE_OPERAND (ref, 0);

  if (TREE_CODE (inner) == COMPONENT_REF)
    inner = component_ref_for_mem_expr (inner);
  else
    {
      tree placeholder_ptr = 0;

      /* Now remove any conversions: they don't change what the underlying
	 object is.  Likewise for SAVE_EXPR.  Also handle PLACEHOLDER_EXPR.  */
      while (TREE_CODE (inner) == NOP_EXPR || TREE_CODE (inner) == CONVERT_EXPR
	     || TREE_CODE (inner) == NON_LVALUE_EXPR
	     || TREE_CODE (inner) == VIEW_CONVERT_EXPR
	     || TREE_CODE (inner) == SAVE_EXPR
	     || TREE_CODE (inner) == PLACEHOLDER_EXPR)
	if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
	  inner = find_placeholder (inner, &placeholder_ptr);
	else
	  inner = TREE_OPERAND (inner, 0);

      if (! DECL_P (inner))
	inner = NULL_TREE;
    }

  if (inner == TREE_OPERAND (ref, 0))
    return ref;
  else
    return build (COMPONENT_REF, TREE_TYPE (ref), inner,
		  TREE_OPERAND (ref, 1));
}
/* Given REF, a MEM, and T, either the type of X or the expression
   corresponding to REF, set the memory attributes.  OBJECTP is nonzero
   if we are making a new object of this type.  */

void
set_mem_attributes (ref, t, objectp)
     rtx ref;
     tree t;
     int objectp;
{
  HOST_WIDE_INT alias = MEM_ALIAS_SET (ref);
  tree expr = MEM_EXPR (ref);
  rtx offset = MEM_OFFSET (ref);
  rtx size = MEM_SIZE (ref);
  unsigned int align = MEM_ALIGN (ref);
  tree type;

  /* It can happen that type_for_mode was given a mode for which there
     is no language-level type.  In which case it returns NULL, which
     we can see here.  */
  if (t == NULL_TREE)
    return;

  type = TYPE_P (t) ? t : TREE_TYPE (t);

  /* If we have already set DECL_RTL = ref, get_alias_set will get the
     wrong answer, as it assumes that DECL_RTL already has the right alias
     info.  Callers should not set DECL_RTL until after the call to
     set_mem_attributes.  */
  if (DECL_P (t) && ref == DECL_RTL_IF_SET (t))
    abort ();

  /* Get the alias set from the expression or type (perhaps using a
     front-end routine) and use it.  */
  alias = get_alias_set (t);

  MEM_VOLATILE_P (ref) = TYPE_VOLATILE (type);
  MEM_IN_STRUCT_P (ref) = AGGREGATE_TYPE_P (type);
  RTX_UNCHANGING_P (ref)
    |= ((lang_hooks.honor_readonly
	 && (TYPE_READONLY (type) || TREE_READONLY (t)))
	|| (! TYPE_P (t) && TREE_CONSTANT (t)));

  /* If we are making an object of this type, or if this is a DECL, we know
     that it is a scalar if the type is not an aggregate.  */
  if ((objectp || DECL_P (t)) && ! AGGREGATE_TYPE_P (type))
    MEM_SCALAR_P (ref) = 1;

  /* We can set the alignment from the type if we are making an object,
     this is an INDIRECT_REF, or if TYPE_ALIGN_OK.  */
  if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
    align = MAX (align, TYPE_ALIGN (type));

  /* If the size is known, we can set that.  */
  if (TYPE_SIZE_UNIT (type) && host_integerp (TYPE_SIZE_UNIT (type), 1))
    size = GEN_INT (tree_low_cst (TYPE_SIZE_UNIT (type), 1));

  /* If T is not a type, we may be able to deduce some more information about
     the expression.  */
  if (! TYPE_P (t))
    {
      maybe_set_unchanging (ref, t);
      if (TREE_THIS_VOLATILE (t))
	MEM_VOLATILE_P (ref) = 1;

      /* Now remove any conversions: they don't change what the underlying
	 object is.  Likewise for SAVE_EXPR.  */
      while (TREE_CODE (t) == NOP_EXPR || TREE_CODE (t) == CONVERT_EXPR
	     || TREE_CODE (t) == NON_LVALUE_EXPR
	     || TREE_CODE (t) == VIEW_CONVERT_EXPR
	     || TREE_CODE (t) == SAVE_EXPR)
	t = TREE_OPERAND (t, 0);

      /* If this expression can't be addressed (e.g., it contains a reference
	 to a non-addressable field), show we don't change its alias set.  */
      if (! can_address_p (t))
	MEM_KEEP_ALIAS_SET_P (ref) = 1;

      /* If this is a decl, set the attributes of the MEM from it.  */
      if (DECL_P (t))
	{
	  expr = t;
	  offset = const0_rtx;
	  size = (DECL_SIZE_UNIT (t)
		  && host_integerp (DECL_SIZE_UNIT (t), 1)
		  ? GEN_INT (tree_low_cst (DECL_SIZE_UNIT (t), 1)) : 0);
	  align = DECL_ALIGN (t);
	}

      /* If this is a constant, we know the alignment.  */
      else if (TREE_CODE_CLASS (TREE_CODE (t)) == 'c')
	{
	  align = TYPE_ALIGN (type);
#ifdef CONSTANT_ALIGNMENT
	  align = CONSTANT_ALIGNMENT (t, align);
#endif
	}

      /* If this is a field reference and not a bit-field, record it.  */
      /* ??? There is some information that can be gleaned from bit-fields,
	 such as the word offset in the structure that might be modified.
	 But skip it for now.  */
      else if (TREE_CODE (t) == COMPONENT_REF
	       && ! DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
	{
	  expr = component_ref_for_mem_expr (t);
	  offset = const0_rtx;
	  /* ??? Any reason the field size would be different than
	     the size we got from the type?  */
	}

      /* If this is an array reference, look for an outer field reference.  */
      else if (TREE_CODE (t) == ARRAY_REF)
	{
	  tree off_tree = size_zero_node;

	  do
	    {
	      off_tree
		= fold (build (PLUS_EXPR, sizetype,
			       fold (build (MULT_EXPR, sizetype,
					    TREE_OPERAND (t, 1),
					    TYPE_SIZE_UNIT (TREE_TYPE (t)))),
			       off_tree));
	      t = TREE_OPERAND (t, 0);
	    }
	  while (TREE_CODE (t) == ARRAY_REF);

	  if (TREE_CODE (t) == COMPONENT_REF)
	    {
	      expr = component_ref_for_mem_expr (t);
	      if (host_integerp (off_tree, 1))
		offset = GEN_INT (tree_low_cst (off_tree, 1));
	      /* ??? Any reason the field size would be different than
		 the size we got from the type?  */
	    }
	}
    }

  /* Now set the attributes we computed above.  */
  MEM_ATTRS (ref)
    = get_mem_attrs (alias, expr, offset, size, align, GET_MODE (ref));

  /* If this is already known to be a scalar or aggregate, we are done.  */
  if (MEM_IN_STRUCT_P (ref) || MEM_SCALAR_P (ref))
    return;

  /* If it is a reference into an aggregate, this is part of an aggregate.
     Otherwise we don't know.  */
  else if (TREE_CODE (t) == COMPONENT_REF || TREE_CODE (t) == ARRAY_REF
	   || TREE_CODE (t) == ARRAY_RANGE_REF
	   || TREE_CODE (t) == BIT_FIELD_REF)
    MEM_IN_STRUCT_P (ref) = 1;
}
/* Set the alias set of MEM to SET.  */

void
set_mem_alias_set (mem, set)
     rtx mem;
     HOST_WIDE_INT set;
{
#ifdef ENABLE_CHECKING
  /* If the new and old alias sets don't conflict, something is wrong.  */
  if (!alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)))
    abort ();
#endif

  MEM_ATTRS (mem) = get_mem_attrs (set, MEM_EXPR (mem), MEM_OFFSET (mem),
				   MEM_SIZE (mem), MEM_ALIGN (mem),
				   GET_MODE (mem));
}

/* Set the alignment of MEM to ALIGN bits.  */

void
set_mem_align (mem, align)
     rtx mem;
     unsigned int align;
{
  MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
				   MEM_OFFSET (mem), MEM_SIZE (mem), align,
				   GET_MODE (mem));
}

/* Set the expr for MEM to EXPR.  */

void
set_mem_expr (mem, expr)
     rtx mem;
     tree expr;
{
  MEM_ATTRS (mem)
    = get_mem_attrs (MEM_ALIAS_SET (mem), expr, MEM_OFFSET (mem),
		     MEM_SIZE (mem), MEM_ALIGN (mem), GET_MODE (mem));
}

/* Set the offset of MEM to OFFSET.  */

void
set_mem_offset (mem, offset)
     rtx mem, offset;
{
  MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
				   offset, MEM_SIZE (mem), MEM_ALIGN (mem),
				   GET_MODE (mem));
}
/* Return a memory reference like MEMREF, but with its mode changed to MODE
   and its address changed to ADDR.  (VOIDmode means don't change the mode.
   NULL for ADDR means don't change the address.)  VALIDATE is nonzero if the
   returned memory location is required to be valid.  The memory
   attributes are not changed.  */

static rtx
change_address_1 (memref, mode, addr, validate)
     rtx memref;
     enum machine_mode mode;
     rtx addr;
     int validate;
{
  rtx new;

  if (GET_CODE (memref) != MEM)
    abort ();
  if (mode == VOIDmode)
    mode = GET_MODE (memref);
  if (addr == 0)
    addr = XEXP (memref, 0);

  if (validate)
    {
      if (reload_in_progress || reload_completed)
	{
	  if (! memory_address_p (mode, addr))
	    abort ();
	}
      else
	addr = memory_address (mode, addr);
    }

  if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
    return memref;

  new = gen_rtx_MEM (mode, addr);
  MEM_COPY_ATTRIBUTES (new, memref);
  return new;
}
/* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
   way we are changing MEMREF, so we only preserve the alias set.  */

rtx
change_address (memref, mode, addr)
     rtx memref;
     enum machine_mode mode;
     rtx addr;
{
  rtx new = change_address_1 (memref, mode, addr, 1);
  enum machine_mode mmode = GET_MODE (new);

  MEM_ATTRS (new)
    = get_mem_attrs (MEM_ALIAS_SET (memref), 0, 0,
		     mmode == BLKmode ? 0 : GEN_INT (GET_MODE_SIZE (mmode)),
		     (mmode == BLKmode ? BITS_PER_UNIT
		      : GET_MODE_ALIGNMENT (mmode)),
		     mmode);

  return new;
}
1921 /* Return a memory reference like MEMREF, but with its mode changed
1922 to MODE and its address offset by OFFSET bytes. If VALIDATE is
1923 nonzero, the memory address is forced to be valid.
1924 If ADJUST is zero, OFFSET is only used to update MEM_ATTRS
1925 and the caller is responsible for adjusting the MEMREF base register. */
1928 adjust_address_1 (memref, mode, offset, validate, adjust)
1929 rtx memref;
1930 enum machine_mode mode;
1931 HOST_WIDE_INT offset;
1932 int validate, adjust;
1934 rtx addr = XEXP (memref, 0);
1935 rtx new;
1936 rtx memoffset = MEM_OFFSET (memref);
1937 rtx size = 0;
1938 unsigned int memalign = MEM_ALIGN (memref);
1940 /* ??? Prefer to create garbage instead of creating shared rtl.
1941 This may happen even if offset is non-zero -- consider
1942 (plus (plus reg reg) const_int) -- so do this always. */
1943 addr = copy_rtx (addr);
1945 if (adjust)
1947 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
1948 object, we can merge it into the LO_SUM. */
1949 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
1950 && offset >= 0
1951 && (unsigned HOST_WIDE_INT) offset
1952 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
1953 addr = gen_rtx_LO_SUM (Pmode, XEXP (addr, 0),
1954 plus_constant (XEXP (addr, 1), offset));
1955 else
1956 addr = plus_constant (addr, offset);
1959 new = change_address_1 (memref, mode, addr, validate);
1961 /* Compute the new values of the memory attributes due to this adjustment.
1962 We add the offsets and update the alignment. */
1963 if (memoffset)
1964 memoffset = GEN_INT (offset + INTVAL (memoffset));
1966 /* Compute the new alignment by taking the MIN of the alignment and the
1967 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
1968 is zero. */
1969 if (offset != 0)
1970 memalign
1971 = MIN (memalign,
1972 (unsigned HOST_WIDE_INT) (offset & -offset) * BITS_PER_UNIT);
1974 /* We can compute the size in a number of ways. */
1975 if (GET_MODE (new) != BLKmode)
1976 size = GEN_INT (GET_MODE_SIZE (GET_MODE (new)));
1977 else if (MEM_SIZE (memref))
1978 size = plus_constant (MEM_SIZE (memref), -offset);
1980 MEM_ATTRS (new) = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref),
1981 memoffset, size, memalign, GET_MODE (new));
1983 /* At some point, we should validate that this offset is within the object,
1984 if all the appropriate values are known. */
1985 return new;
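/* For illustration: callers normally reach this through the
   adjust_address and adjust_address_nv macros (assumed to live in
   expr.h), which fix VALIDATE and ADJUST.  A sketch of splitting a
   DImode reference into word-sized halves on a 32-bit target:

     rtx lo = adjust_address (dimem, SImode, 0);
     rtx hi = adjust_address (dimem, SImode, GET_MODE_SIZE (SImode));

   Both halves inherit the alias set and MEM_EXPR of DIMEM, with
   MEM_OFFSET, MEM_SIZE and MEM_ALIGN recomputed as above.  */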
1988 /* Return a memory reference like MEMREF, but with its mode changed
1989 to MODE and its address changed to ADDR, which is assumed to be
1990 MEMREF offset by OFFSET bytes. If VALIDATE is
1991 nonzero, the memory address is forced to be valid. */
1994 adjust_automodify_address_1 (memref, mode, addr, offset, validate)
1995 rtx memref;
1996 enum machine_mode mode;
1997 rtx addr;
1998 HOST_WIDE_INT offset;
1999 int validate;
2001 memref = change_address_1 (memref, VOIDmode, addr, validate);
2002 return adjust_address_1 (memref, mode, offset, validate, 0);
2005 /* Return a memory reference like MEMREF, but whose address is changed by
2006 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2007 known to be in OFFSET (possibly 1). */
2010 offset_address (memref, offset, pow2)
2011 rtx memref;
2012 rtx offset;
2013 HOST_WIDE_INT pow2;
2015 rtx new, addr = XEXP (memref, 0);
2017 new = simplify_gen_binary (PLUS, Pmode, addr, offset);
2019 /* At this point we don't know _why_ the address is invalid. It
2020 could have secondary memory references, multiplies or anything.
2022 However, if we did go and rearrange things, we can wind up not
2023 being able to recognize the magic around pic_offset_table_rtx.
2024 This stuff is fragile, and is yet another example of why it is
2025 bad to expose PIC machinery too early. */
2026 if (! memory_address_p (GET_MODE (memref), new)
2027 && GET_CODE (addr) == PLUS
2028 && XEXP (addr, 0) == pic_offset_table_rtx)
2030 addr = force_reg (GET_MODE (addr), addr);
2031 new = simplify_gen_binary (PLUS, Pmode, addr, offset);
2034 update_temp_slot_address (XEXP (memref, 0), new);
2035 new = change_address_1 (memref, VOIDmode, new, 1);
2037 /* Update the alignment to reflect the offset. Reset the offset, which
2038 we don't know. */
2039 MEM_ATTRS (new)
2040 = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref), 0, 0,
2041 MIN (MEM_ALIGN (memref),
2042 (unsigned HOST_WIDE_INT) pow2 * BITS_PER_UNIT),
2043 GET_MODE (new));
2044 return new;
2047 /* Return a memory reference like MEMREF, but with its address changed to
2048 ADDR. The caller is asserting that the actual piece of memory pointed
2049 to is the same, just the form of the address is being changed, such as
2050 by putting something into a register. */
2053 replace_equiv_address (memref, addr)
2054 rtx memref;
2055 rtx addr;
2057 /* change_address_1 copies the memory attribute structure without change
2058 and that's exactly what we want here. */
2059 update_temp_slot_address (XEXP (memref, 0), addr);
2060 return change_address_1 (memref, VOIDmode, addr, 1);
2063 /* Likewise, but the reference is not required to be valid. */
2066 replace_equiv_address_nv (memref, addr)
2067 rtx memref;
2068 rtx addr;
2070 return change_address_1 (memref, VOIDmode, addr, 0);
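/* For illustration, the canonical use of replace_equiv_address is to
   force a complicated address into a register while asserting that the
   same memory is still referenced; a sketch:

     rtx addr = force_reg (Pmode, XEXP (mem, 0));
     mem = replace_equiv_address (mem, addr);

   The _nv variant is for callers that cannot yet guarantee a strictly
   valid address, e.g. while reload is still rewriting things.  */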
2073 /* Return a memory reference like MEMREF, but with its mode widened to
2074 MODE and offset by OFFSET. This would be used by targets that e.g.
2075 cannot issue QImode memory operations and have to use SImode memory
2076 operations plus masking logic. */
2079 widen_memory_access (memref, mode, offset)
2080 rtx memref;
2081 enum machine_mode mode;
2082 HOST_WIDE_INT offset;
2084 rtx new = adjust_address_1 (memref, mode, offset, 1, 1);
2085 tree expr = MEM_EXPR (new);
2086 rtx memoffset = MEM_OFFSET (new);
2087 unsigned int size = GET_MODE_SIZE (mode);
2089 /* If we don't know what offset we were at within the expression, then
2090 we can't know if we've overstepped the bounds. */
2091 if (! memoffset)
2092 expr = NULL_TREE;
2094 while (expr)
2096 if (TREE_CODE (expr) == COMPONENT_REF)
2098 tree field = TREE_OPERAND (expr, 1);
2100 if (! DECL_SIZE_UNIT (field))
2102 expr = NULL_TREE;
2103 break;
2106 /* Is the field at least as large as the access? If so, ok,
2107 otherwise strip back to the containing structure. */
2108 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2109 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
2110 && INTVAL (memoffset) >= 0)
2111 break;
2113 if (! host_integerp (DECL_FIELD_OFFSET (field), 1))
2115 expr = NULL_TREE;
2116 break;
2119 expr = TREE_OPERAND (expr, 0);
2120 memoffset = (GEN_INT (INTVAL (memoffset)
2121 + tree_low_cst (DECL_FIELD_OFFSET (field), 1)
2122 + (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
2123 / BITS_PER_UNIT)));
2125 /* Similarly for the decl. */
2126 else if (DECL_P (expr)
2127 && DECL_SIZE_UNIT (expr)
2128 && TREE_CODE (DECL_SIZE_UNIT (expr)) == INTEGER_CST
2129 && compare_tree_int (DECL_SIZE_UNIT (expr), size) >= 0
2130 && (! memoffset || INTVAL (memoffset) >= 0))
2131 break;
2132 else
2134 /* The widened memory access overflows the expression, which means
2135 that it could alias another expression. Zap it. */
2136 expr = NULL_TREE;
2137 break;
2141 if (! expr)
2142 memoffset = NULL_RTX;
2144 /* The widened memory may alias other stuff, so zap the alias set. */
2145 /* ??? Maybe use get_alias_set on any remaining expression. */
2147 MEM_ATTRS (new) = get_mem_attrs (0, expr, memoffset, GEN_INT (size),
2148 MEM_ALIGN (new), mode);
2150 return new;
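/* For illustration, a sketch of the use case described above: a target
   without byte loads might widen a QImode reference before emitting the
   word-sized load and mask:

     rtx wide = widen_memory_access (bytemem, SImode, 0);

   WIDE starts at the same address; the loop above decides whether the
   MEM_EXPR and offset can be kept without overstepping the underlying
   object.  */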
2153 /* Return a newly created CODE_LABEL rtx with a unique label number. */
2156 gen_label_rtx ()
2158 rtx label;
2160 label = gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX, NULL_RTX,
2161 NULL, label_num++, NULL, NULL);
2163 LABEL_NUSES (label) = 0;
2164 LABEL_ALTERNATE_NAME (label) = NULL;
2165 return label;
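/* For illustration, the usual pairing with the emit functions defined
   later in this file; a sketch:

     rtx label = gen_label_rtx ();
     ...
     emit_label (label);

   The CODE_LABEL has no position in the insn chain until emit_label (or
   emit_label_before/emit_label_after) links it in.  */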
2168 /* For procedure integration. */
2170 /* Install new pointers to the first and last insns in the chain.
2171 Also, set cur_insn_uid to one higher than the last in use.
2172 Used for an inline-procedure after copying the insn chain. */
2174 void
2175 set_new_first_and_last_insn (first, last)
2176 rtx first, last;
2178 rtx insn;
2180 first_insn = first;
2181 last_insn = last;
2182 cur_insn_uid = 0;
2184 for (insn = first; insn; insn = NEXT_INSN (insn))
2185 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2187 cur_insn_uid++;
2190 /* Set the range of label numbers found in the current function.
2191 This is used when belatedly compiling an inline function. */
2193 void
2194 set_new_first_and_last_label_num (first, last)
2195 int first, last;
2197 base_label_num = label_num;
2198 first_label_num = first;
2199 last_label_num = last;
2202 /* Set the last label number found in the current function.
2203 This is used when belatedly compiling an inline function. */
2205 void
2206 set_new_last_label_num (last)
2207 int last;
2209 base_label_num = label_num;
2210 last_label_num = last;
2213 /* Restore all variables describing the current status from the structure *P.
2214 This is used after a nested function. */
2216 void
2217 restore_emit_status (p)
2218 struct function *p ATTRIBUTE_UNUSED;
2220 last_label_num = 0;
2223 /* Go through all the RTL insn bodies and copy any invalid shared
2224 structure. This routine should only be called once. */
2226 void
2227 unshare_all_rtl (fndecl, insn)
2228 tree fndecl;
2229 rtx insn;
2231 tree decl;
2233 /* Make sure that virtual parameters are not shared. */
2234 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
2235 SET_DECL_RTL (decl, copy_rtx_if_shared (DECL_RTL (decl)));
2237 /* Make sure that virtual stack slots are not shared. */
2238 unshare_all_decls (DECL_INITIAL (fndecl));
2240 /* Unshare just about everything else. */
2241 unshare_all_rtl_1 (insn);
2243 /* Make sure the addresses of stack slots found outside the insn chain
2244 (such as, in DECL_RTL of a variable) are not shared
2245 with the insn chain.
2247 This special care is necessary when the stack slot MEM does not
2248 actually appear in the insn chain. If it does appear, its address
2249 is unshared from all else at that point. */
2250 stack_slot_list = copy_rtx_if_shared (stack_slot_list);
2253 /* Go through all the RTL insn bodies and copy any invalid shared
2254 structure, again. This is a fairly expensive thing to do, so it
2255 should be done sparingly. */
2257 void
2258 unshare_all_rtl_again (insn)
2259 rtx insn;
2261 rtx p;
2262 tree decl;
2264 for (p = insn; p; p = NEXT_INSN (p))
2265 if (INSN_P (p))
2267 reset_used_flags (PATTERN (p));
2268 reset_used_flags (REG_NOTES (p));
2269 reset_used_flags (LOG_LINKS (p));
2272 /* Make sure that virtual stack slots are not shared. */
2273 reset_used_decls (DECL_INITIAL (cfun->decl));
2275 /* Make sure that virtual parameters are not shared. */
2276 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = TREE_CHAIN (decl))
2277 reset_used_flags (DECL_RTL (decl));
2279 reset_used_flags (stack_slot_list);
2281 unshare_all_rtl (cfun->decl, insn);
2284 /* Go through all the RTL insn bodies and copy any invalid shared structure.
2285 Assumes the mark bits are cleared at entry. */
2287 static void
2288 unshare_all_rtl_1 (insn)
2289 rtx insn;
2291 for (; insn; insn = NEXT_INSN (insn))
2292 if (INSN_P (insn))
2294 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2295 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2296 LOG_LINKS (insn) = copy_rtx_if_shared (LOG_LINKS (insn));
2300 /* Go through all virtual stack slots of a function and copy any
2301 shared structure. */
2302 static void
2303 unshare_all_decls (blk)
2304 tree blk;
2306 tree t;
2308 /* Copy shared decls. */
2309 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
2310 if (DECL_RTL_SET_P (t))
2311 SET_DECL_RTL (t, copy_rtx_if_shared (DECL_RTL (t)));
2313 /* Now process sub-blocks. */
2314 for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
2315 unshare_all_decls (t);
2318 /* Go through all virtual stack slots of a function and mark them as
2319 not shared. */
2320 static void
2321 reset_used_decls (blk)
2322 tree blk;
2324 tree t;
2326 /* Mark decls. */
2327 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
2328 if (DECL_RTL_SET_P (t))
2329 reset_used_flags (DECL_RTL (t));
2331 /* Now process sub-blocks. */
2332 for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
2333 reset_used_decls (t);
2336 /* Similar to `copy_rtx' except that if MAY_SHARE is present, it is
2337 placed in the result directly, rather than being copied. MAY_SHARE is
2338 either a MEM or an EXPR_LIST of MEMs. */
2341 copy_most_rtx (orig, may_share)
2342 rtx orig;
2343 rtx may_share;
2345 rtx copy;
2346 int i, j;
2347 RTX_CODE code;
2348 const char *format_ptr;
2350 if (orig == may_share
2351 || (GET_CODE (may_share) == EXPR_LIST
2352 && in_expr_list_p (may_share, orig)))
2353 return orig;
2355 code = GET_CODE (orig);
2357 switch (code)
2359 case REG:
2360 case QUEUED:
2361 case CONST_INT:
2362 case CONST_DOUBLE:
2363 case CONST_VECTOR:
2364 case SYMBOL_REF:
2365 case CODE_LABEL:
2366 case PC:
2367 case CC0:
2368 return orig;
2369 default:
2370 break;
2373 copy = rtx_alloc (code);
2374 PUT_MODE (copy, GET_MODE (orig));
2375 RTX_FLAG (copy, in_struct) = RTX_FLAG (orig, in_struct);
2376 RTX_FLAG (copy, volatil) = RTX_FLAG (orig, volatil);
2377 RTX_FLAG (copy, unchanging) = RTX_FLAG (orig, unchanging);
2378 RTX_FLAG (copy, integrated) = RTX_FLAG (orig, integrated);
2379 RTX_FLAG (copy, frame_related) = RTX_FLAG (orig, frame_related);
2381 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
2383 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
2385 switch (*format_ptr++)
2387 case 'e':
2388 XEXP (copy, i) = XEXP (orig, i);
2389 if (XEXP (orig, i) != NULL && XEXP (orig, i) != may_share)
2390 XEXP (copy, i) = copy_most_rtx (XEXP (orig, i), may_share);
2391 break;
2393 case 'u':
2394 XEXP (copy, i) = XEXP (orig, i);
2395 break;
2397 case 'E':
2398 case 'V':
2399 XVEC (copy, i) = XVEC (orig, i);
2400 if (XVEC (orig, i) != NULL)
2402 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
2403 for (j = 0; j < XVECLEN (copy, i); j++)
2404 XVECEXP (copy, i, j)
2405 = copy_most_rtx (XVECEXP (orig, i, j), may_share);
2407 break;
2409 case 'w':
2410 XWINT (copy, i) = XWINT (orig, i);
2411 break;
2413 case 'n':
2414 case 'i':
2415 XINT (copy, i) = XINT (orig, i);
2416 break;
2418 case 't':
2419 XTREE (copy, i) = XTREE (orig, i);
2420 break;
2422 case 's':
2423 case 'S':
2424 XSTR (copy, i) = XSTR (orig, i);
2425 break;
2427 case '0':
2428 /* Copy this through the wide int field; that's safest. */
2429 X0WINT (copy, i) = X0WINT (orig, i);
2430 break;
2432 default:
2433 abort ();
2436 return copy;
2439 /* Mark ORIG as in use, and return a copy of it if it was already in use.
2440 Recursively does the same for subexpressions. */
2443 copy_rtx_if_shared (orig)
2444 rtx orig;
2446 rtx x = orig;
2447 int i;
2448 enum rtx_code code;
2449 const char *format_ptr;
2450 int copied = 0;
2452 if (x == 0)
2453 return 0;
2455 code = GET_CODE (x);
2457 /* These types may be freely shared. */
2459 switch (code)
2461 case REG:
2462 case QUEUED:
2463 case CONST_INT:
2464 case CONST_DOUBLE:
2465 case CONST_VECTOR:
2466 case SYMBOL_REF:
2467 case CODE_LABEL:
2468 case PC:
2469 case CC0:
2470 case SCRATCH:
2471 /* SCRATCH must be shared because each one represents a distinct value. */
2472 return x;
2474 case CONST:
2475 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
2476 a LABEL_REF, it isn't sharable. */
2477 if (GET_CODE (XEXP (x, 0)) == PLUS
2478 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
2479 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
2480 return x;
2481 break;
2483 case INSN:
2484 case JUMP_INSN:
2485 case CALL_INSN:
2486 case NOTE:
2487 case BARRIER:
2488 /* The chain of insns is not being copied. */
2489 return x;
2491 case MEM:
2492 /* A MEM is allowed to be shared if its address is constant.
2494 We used to allow sharing of MEMs which referenced
2495 virtual_stack_vars_rtx or virtual_incoming_args_rtx, but
2496 that can lose. instantiate_virtual_regs will not unshare
2497 the MEMs, and combine may change the structure of the address
2498 because it looks safe and profitable in one context, but
2499 in some other context it creates unrecognizable RTL. */
2500 if (CONSTANT_ADDRESS_P (XEXP (x, 0)))
2501 return x;
2503 break;
2505 default:
2506 break;
2509 /* This rtx may not be shared. If it has already been seen,
2510 replace it with a copy of itself. */
2512 if (RTX_FLAG (x, used))
2514 rtx copy;
2516 copy = rtx_alloc (code);
2517 memcpy (copy, x,
2518 (sizeof (*copy) - sizeof (copy->fld)
2519 + sizeof (copy->fld[0]) * GET_RTX_LENGTH (code)));
2520 x = copy;
2521 copied = 1;
2523 RTX_FLAG (x, used) = 1;
2525 /* Now scan the subexpressions recursively.
2526 We can store any replaced subexpressions directly into X
2527 since we know X is not shared! Any vectors in X
2528 must be copied if X was copied. */
2530 format_ptr = GET_RTX_FORMAT (code);
2532 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2534 switch (*format_ptr++)
2536 case 'e':
2537 XEXP (x, i) = copy_rtx_if_shared (XEXP (x, i));
2538 break;
2540 case 'E':
2541 if (XVEC (x, i) != NULL)
2543 int j;
2544 int len = XVECLEN (x, i);
2546 if (copied && len > 0)
2547 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
2548 for (j = 0; j < len; j++)
2549 XVECEXP (x, i, j) = copy_rtx_if_shared (XVECEXP (x, i, j));
2551 break;
2554 return x;
2557 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
2558 to look for shared sub-parts. */
2560 void
2561 reset_used_flags (x)
2562 rtx x;
2564 int i, j;
2565 enum rtx_code code;
2566 const char *format_ptr;
2568 if (x == 0)
2569 return;
2571 code = GET_CODE (x);
2573 /* These types may be freely shared so we needn't do any resetting
2574 for them. */
2576 switch (code)
2578 case REG:
2579 case QUEUED:
2580 case CONST_INT:
2581 case CONST_DOUBLE:
2582 case CONST_VECTOR:
2583 case SYMBOL_REF:
2584 case CODE_LABEL:
2585 case PC:
2586 case CC0:
2587 return;
2589 case INSN:
2590 case JUMP_INSN:
2591 case CALL_INSN:
2592 case NOTE:
2593 case LABEL_REF:
2594 case BARRIER:
2595 /* The chain of insns is not being copied. */
2596 return;
2598 default:
2599 break;
2602 RTX_FLAG (x, used) = 0;
2604 format_ptr = GET_RTX_FORMAT (code);
2605 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2607 switch (*format_ptr++)
2609 case 'e':
2610 reset_used_flags (XEXP (x, i));
2611 break;
2613 case 'E':
2614 for (j = 0; j < XVECLEN (x, i); j++)
2615 reset_used_flags (XVECEXP (x, i, j));
2616 break;
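/* For illustration, the two-phase protocol these routines implement, as
   used by unshare_all_rtl_again above; a sketch for a single insn:

     reset_used_flags (PATTERN (insn));
     PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));

   The first pass clears the mark bits; the second marks each node as it
   is reached and copies any node reached twice, so only genuinely shared
   structure gets duplicated.  */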
2621 /* Copy X if necessary so that it won't be altered by changes in OTHER.
2622 Return X or the rtx for the pseudo reg the value of X was copied into.
2623 OTHER must be valid as a SET_DEST. */
2626 make_safe_from (x, other)
2627 rtx x, other;
2629 while (1)
2630 switch (GET_CODE (other))
2632 case SUBREG:
2633 other = SUBREG_REG (other);
2634 break;
2635 case STRICT_LOW_PART:
2636 case SIGN_EXTEND:
2637 case ZERO_EXTEND:
2638 other = XEXP (other, 0);
2639 break;
2640 default:
2641 goto done;
2643 done:
2644 if ((GET_CODE (other) == MEM
2645 && ! CONSTANT_P (x)
2646 && GET_CODE (x) != REG
2647 && GET_CODE (x) != SUBREG)
2648 || (GET_CODE (other) == REG
2649 && (REGNO (other) < FIRST_PSEUDO_REGISTER
2650 || reg_mentioned_p (other, x))))
2652 rtx temp = gen_reg_rtx (GET_MODE (x));
2653 emit_move_insn (temp, x);
2654 return temp;
2656 return x;
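/* For illustration, a sketch of the typical call from an expander that
   is about to clobber TARGET before it has finished consuming OP0:

     op0 = make_safe_from (op0, target);
     ... emit code that stores into TARGET ...

   If OP0 mentioned TARGET, or was a MEM that might overlap it, OP0 has
   been copied into a fresh pseudo first.  */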
2659 /* Emission of insns (adding them to the doubly-linked list). */
2661 /* Return the first insn of the current sequence or current function. */
2664 get_insns ()
2666 return first_insn;
2669 /* Specify a new insn as the first in the chain. */
2671 void
2672 set_first_insn (insn)
2673 rtx insn;
2675 if (PREV_INSN (insn) != 0)
2676 abort ();
2677 first_insn = insn;
2680 /* Return the last insn emitted in current sequence or current function. */
2683 get_last_insn ()
2685 return last_insn;
2688 /* Specify a new insn as the last in the chain. */
2690 void
2691 set_last_insn (insn)
2692 rtx insn;
2694 if (NEXT_INSN (insn) != 0)
2695 abort ();
2696 last_insn = insn;
2699 /* Return the last insn emitted, even if it is in a sequence now pushed. */
2702 get_last_insn_anywhere ()
2704 struct sequence_stack *stack;
2705 if (last_insn)
2706 return last_insn;
2707 for (stack = seq_stack; stack; stack = stack->next)
2708 if (stack->last != 0)
2709 return stack->last;
2710 return 0;
2713 /* Return the first nonnote insn emitted in current sequence or current
2714 function. This routine looks inside SEQUENCEs. */
2717 get_first_nonnote_insn ()
2719 rtx insn = first_insn;
2721 while (insn)
2723 insn = next_insn (insn);
2724 if (insn == 0 || GET_CODE (insn) != NOTE)
2725 break;
2728 return insn;
2731 /* Return the last nonnote insn emitted in current sequence or current
2732 function. This routine looks inside SEQUENCEs. */
2735 get_last_nonnote_insn ()
2737 rtx insn = last_insn;
2739 while (insn)
2741 insn = previous_insn (insn);
2742 if (insn == 0 || GET_CODE (insn) != NOTE)
2743 break;
2746 return insn;
2749 /* Return a number larger than any instruction's uid in this function. */
2752 get_max_uid ()
2754 return cur_insn_uid;
2757 /* Renumber instructions so that no instruction UIDs are wasted. */
2759 void
2760 renumber_insns (stream)
2761 FILE *stream;
2763 rtx insn;
2765 /* If we're not supposed to renumber instructions, don't. */
2766 if (!flag_renumber_insns)
2767 return;
2769 /* If there aren't that many instructions, then it's not really
2770 worth renumbering them. */
2771 if (flag_renumber_insns == 1 && get_max_uid () < 25000)
2772 return;
2774 cur_insn_uid = 1;
2776 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2778 if (stream)
2779 fprintf (stream, "Renumbering insn %d to %d\n",
2780 INSN_UID (insn), cur_insn_uid);
2781 INSN_UID (insn) = cur_insn_uid++;
2785 /* Return the next insn. If it is a SEQUENCE, return the first insn
2786 of the sequence. */
2789 next_insn (insn)
2790 rtx insn;
2792 if (insn)
2794 insn = NEXT_INSN (insn);
2795 if (insn && GET_CODE (insn) == INSN
2796 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2797 insn = XVECEXP (PATTERN (insn), 0, 0);
2800 return insn;
2803 /* Return the previous insn. If it is a SEQUENCE, return the last insn
2804 of the sequence. */
2807 previous_insn (insn)
2808 rtx insn;
2810 if (insn)
2812 insn = PREV_INSN (insn);
2813 if (insn && GET_CODE (insn) == INSN
2814 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2815 insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
2818 return insn;
2821 /* Return the next insn after INSN that is not a NOTE. This routine does not
2822 look inside SEQUENCEs. */
2825 next_nonnote_insn (insn)
2826 rtx insn;
2828 while (insn)
2830 insn = NEXT_INSN (insn);
2831 if (insn == 0 || GET_CODE (insn) != NOTE)
2832 break;
2835 return insn;
2838 /* Return the previous insn before INSN that is not a NOTE. This routine does
2839 not look inside SEQUENCEs. */
2842 prev_nonnote_insn (insn)
2843 rtx insn;
2845 while (insn)
2847 insn = PREV_INSN (insn);
2848 if (insn == 0 || GET_CODE (insn) != NOTE)
2849 break;
2852 return insn;
2855 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
2856 or 0, if there is none. This routine does not look inside
2857 SEQUENCEs. */
2860 next_real_insn (insn)
2861 rtx insn;
2863 while (insn)
2865 insn = NEXT_INSN (insn);
2866 if (insn == 0 || GET_CODE (insn) == INSN
2867 || GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == JUMP_INSN)
2868 break;
2871 return insn;
2874 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
2875 or 0, if there is none. This routine does not look inside
2876 SEQUENCEs. */
2879 prev_real_insn (insn)
2880 rtx insn;
2882 while (insn)
2884 insn = PREV_INSN (insn);
2885 if (insn == 0 || GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
2886 || GET_CODE (insn) == JUMP_INSN)
2887 break;
2890 return insn;
2893 /* Find the next insn after INSN that really does something. This routine
2894 does not look inside SEQUENCEs. Until reload has completed, this is the
2895 same as next_real_insn. */
2898 active_insn_p (insn)
2899 rtx insn;
2901 return (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == JUMP_INSN
2902 || (GET_CODE (insn) == INSN
2903 && (! reload_completed
2904 || (GET_CODE (PATTERN (insn)) != USE
2905 && GET_CODE (PATTERN (insn)) != CLOBBER))));
2909 next_active_insn (insn)
2910 rtx insn;
2912 while (insn)
2914 insn = NEXT_INSN (insn);
2915 if (insn == 0 || active_insn_p (insn))
2916 break;
2919 return insn;
2922 /* Find the last insn before INSN that really does something. This routine
2923 does not look inside SEQUENCEs. Until reload has completed, this is the
2924 same as prev_real_insn. */
2927 prev_active_insn (insn)
2928 rtx insn;
2930 while (insn)
2932 insn = PREV_INSN (insn);
2933 if (insn == 0 || active_insn_p (insn))
2934 break;
2937 return insn;
2940 /* Return the next CODE_LABEL after the insn INSN, or 0 if there is none. */
2943 next_label (insn)
2944 rtx insn;
2946 while (insn)
2948 insn = NEXT_INSN (insn);
2949 if (insn == 0 || GET_CODE (insn) == CODE_LABEL)
2950 break;
2953 return insn;
2956 /* Return the last CODE_LABEL before the insn INSN, or 0 if there is none. */
2959 prev_label (insn)
2960 rtx insn;
2962 while (insn)
2964 insn = PREV_INSN (insn);
2965 if (insn == 0 || GET_CODE (insn) == CODE_LABEL)
2966 break;
2969 return insn;
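/* For illustration, these walkers compose in the obvious way; a sketch
   of visiting every real insn in the current function:

     rtx insn;
     for (insn = get_insns (); insn; insn = next_real_insn (insn))
       if (INSN_P (insn))
         ... process insn ...

   Starting from get_insns () rather than from its next_real_insn keeps
   the first insn visible; the INSN_P test filters it out if it happens
   to be a note or a label.  */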
2972 #ifdef HAVE_cc0
2973 /* INSN uses CC0 and is being moved into a delay slot. Set up REG_CC_SETTER
2974 and REG_CC_USER notes so we can find it. */
2976 void
2977 link_cc0_insns (insn)
2978 rtx insn;
2980 rtx user = next_nonnote_insn (insn);
2982 if (GET_CODE (user) == INSN && GET_CODE (PATTERN (user)) == SEQUENCE)
2983 user = XVECEXP (PATTERN (user), 0, 0);
2985 REG_NOTES (user) = gen_rtx_INSN_LIST (REG_CC_SETTER, insn,
2986 REG_NOTES (user));
2987 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_CC_USER, user, REG_NOTES (insn));
2990 /* Return the next insn that uses CC0 after INSN, which is assumed to
2991 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
2992 applied to the result of this function should yield INSN).
2994 Normally, this is simply the next insn. However, if a REG_CC_USER note
2995 is present, it contains the insn that uses CC0.
2997 Return 0 if we can't find the insn. */
3000 next_cc0_user (insn)
3001 rtx insn;
3003 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3005 if (note)
3006 return XEXP (note, 0);
3008 insn = next_nonnote_insn (insn);
3009 if (insn && GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
3010 insn = XVECEXP (PATTERN (insn), 0, 0);
3012 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3013 return insn;
3015 return 0;
3018 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3019 note, it is the previous insn. */
3022 prev_cc0_setter (insn)
3023 rtx insn;
3025 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3027 if (note)
3028 return XEXP (note, 0);
3030 insn = prev_nonnote_insn (insn);
3031 if (! sets_cc0_p (PATTERN (insn)))
3032 abort ();
3034 return insn;
3036 #endif
3038 /* Increment the label uses for all labels present in rtx. */
3040 static void
3041 mark_label_nuses (x)
3042 rtx x;
3044 enum rtx_code code;
3045 int i, j;
3046 const char *fmt;
3048 code = GET_CODE (x);
3049 if (code == LABEL_REF)
3050 LABEL_NUSES (XEXP (x, 0))++;
3052 fmt = GET_RTX_FORMAT (code);
3053 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3055 if (fmt[i] == 'e')
3056 mark_label_nuses (XEXP (x, i));
3057 else if (fmt[i] == 'E')
3058 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3059 mark_label_nuses (XVECEXP (x, i, j));
3064 /* Try splitting insns that can be split for better scheduling.
3065 PAT is the pattern which might split.
3066 TRIAL is the insn providing PAT.
3067 LAST is non-zero if we should return the last insn of the sequence produced.
3069 If this routine succeeds in splitting, it returns the first or last
3070 replacement insn depending on the value of LAST. Otherwise, it
3071 returns TRIAL. If the insn to be returned can be split, it will be. */
3074 try_split (pat, trial, last)
3075 rtx pat, trial;
3076 int last;
3078 rtx before = PREV_INSN (trial);
3079 rtx after = NEXT_INSN (trial);
3080 int has_barrier = 0;
3081 rtx tem;
3082 rtx note, seq;
3083 int probability;
3085 if (any_condjump_p (trial)
3086 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3087 split_branch_probability = INTVAL (XEXP (note, 0));
3088 probability = split_branch_probability;
3090 seq = split_insns (pat, trial);
3092 split_branch_probability = -1;
3094 /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
3095 We may need to handle this specially. */
3096 if (after && GET_CODE (after) == BARRIER)
3098 has_barrier = 1;
3099 after = NEXT_INSN (after);
3102 if (seq)
3104 /* Sometimes there will be only one insn in that list; this case will
3105 normally arise only when we want it in turn to be split (SFmode on
3106 the 29k is an example). */
3107 if (NEXT_INSN (seq) != NULL_RTX)
3109 rtx insn_last, insn;
3110 int njumps = 0;
3112 /* Avoid infinite loop if any insn of the result matches
3113 the original pattern. */
3114 insn_last = seq;
3115 while (1)
3117 if (INSN_P (insn_last)
3118 && rtx_equal_p (PATTERN (insn_last), pat))
3119 return trial;
3120 if (NEXT_INSN (insn_last) == NULL_RTX)
3121 break;
3122 insn_last = NEXT_INSN (insn_last);
3125 /* Mark labels. */
3126 insn = insn_last;
3127 while (insn != NULL_RTX)
3129 if (GET_CODE (insn) == JUMP_INSN)
3131 mark_jump_label (PATTERN (insn), insn, 0);
3132 njumps++;
3133 if (probability != -1
3134 && any_condjump_p (insn)
3135 && !find_reg_note (insn, REG_BR_PROB, 0))
3137 /* We can preserve the REG_BR_PROB notes only if exactly
3138 one jump is created; otherwise the machine description
3139 is responsible for this step using
3140 the split_branch_probability variable. */
3141 if (njumps != 1)
3142 abort ();
3143 REG_NOTES (insn)
3144 = gen_rtx_EXPR_LIST (REG_BR_PROB,
3145 GEN_INT (probability),
3146 REG_NOTES (insn));
3150 insn = PREV_INSN (insn);
3153 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3154 in SEQ and copy our CALL_INSN_FUNCTION_USAGE to it. */
3155 if (GET_CODE (trial) == CALL_INSN)
3157 insn = insn_last;
3158 while (insn != NULL_RTX)
3160 if (GET_CODE (insn) == CALL_INSN)
3161 CALL_INSN_FUNCTION_USAGE (insn)
3162 = CALL_INSN_FUNCTION_USAGE (trial);
3164 insn = PREV_INSN (insn);
3168 /* Copy notes, particularly those related to the CFG. */
3169 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3171 switch (REG_NOTE_KIND (note))
3173 case REG_EH_REGION:
3174 insn = insn_last;
3175 while (insn != NULL_RTX)
3177 if (GET_CODE (insn) == CALL_INSN
3178 || (flag_non_call_exceptions
3179 && may_trap_p (PATTERN (insn))))
3180 REG_NOTES (insn)
3181 = gen_rtx_EXPR_LIST (REG_EH_REGION,
3182 XEXP (note, 0),
3183 REG_NOTES (insn));
3184 insn = PREV_INSN (insn);
3186 break;
3188 case REG_NORETURN:
3189 case REG_SETJMP:
3190 case REG_ALWAYS_RETURN:
3191 insn = insn_last;
3192 while (insn != NULL_RTX)
3194 if (GET_CODE (insn) == CALL_INSN)
3195 REG_NOTES (insn)
3196 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3197 XEXP (note, 0),
3198 REG_NOTES (insn));
3199 insn = PREV_INSN (insn);
3201 break;
3203 case REG_NON_LOCAL_GOTO:
3204 insn = insn_last;
3205 while (insn != NULL_RTX)
3207 if (GET_CODE (insn) == JUMP_INSN)
3208 REG_NOTES (insn)
3209 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3210 XEXP (note, 0),
3211 REG_NOTES (insn));
3212 insn = PREV_INSN (insn);
3214 break;
3216 default:
3217 break;
3221 /* If there are LABELS inside the split insns, increment the
3222 usage count so we don't delete the label. */
3223 if (GET_CODE (trial) == INSN)
3225 insn = insn_last;
3226 while (insn != NULL_RTX)
3228 if (GET_CODE (insn) == INSN)
3229 mark_label_nuses (PATTERN (insn));
3231 insn = PREV_INSN (insn);
3235 tem = emit_insn_after_scope (seq, trial, INSN_SCOPE (trial));
3237 delete_insn (trial);
3238 if (has_barrier)
3239 emit_barrier_after (tem);
3241 /* Recursively call try_split for each new insn created; by the
3242 time control returns here that insn will be fully split, so
3243 set LAST and continue from the insn after the one returned.
3244 We can't use next_active_insn here since AFTER may be a note.
3245 Ignore deleted insns, which can occur if not optimizing. */
3246 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3247 if (! INSN_DELETED_P (tem) && INSN_P (tem))
3248 tem = try_split (PATTERN (tem), tem, 1);
3250 /* Avoid infinite loop if the result matches the original pattern. */
3251 else if (rtx_equal_p (PATTERN (seq), pat))
3252 return trial;
3253 else
3255 PATTERN (trial) = PATTERN (seq);
3256 INSN_CODE (trial) = -1;
3257 try_split (PATTERN (trial), trial, last);
3260 /* Return either the first or the last insn, depending on which was
3261 requested. */
3262 return last
3263 ? (after ? PREV_INSN (after) : last_insn)
3264 : NEXT_INSN (before);
3267 return trial;
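/* For illustration, the shape of a caller's loop (compare
   split_all_insns in recog.c); a sketch:

     if (INSN_P (insn))
       insn = try_split (PATTERN (insn), insn, 1);

   With LAST nonzero the return value is the right place to continue a
   forward scan; if nothing was split, TRIAL comes back unchanged.  */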
3270 /* Make and return an INSN rtx, initializing all its slots.
3271 Store PATTERN in the pattern slots. */
3274 make_insn_raw (pattern)
3275 rtx pattern;
3277 rtx insn;
3279 insn = rtx_alloc (INSN);
3281 INSN_UID (insn) = cur_insn_uid++;
3282 PATTERN (insn) = pattern;
3283 INSN_CODE (insn) = -1;
3284 LOG_LINKS (insn) = NULL;
3285 REG_NOTES (insn) = NULL;
3286 INSN_SCOPE (insn) = NULL;
3287 BLOCK_FOR_INSN (insn) = NULL;
3289 #ifdef ENABLE_RTL_CHECKING
3290 if (insn
3291 && INSN_P (insn)
3292 && (returnjump_p (insn)
3293 || (GET_CODE (insn) == SET
3294 && SET_DEST (insn) == pc_rtx)))
3296 warning ("ICE: emit_insn used where emit_jump_insn needed:\n");
3297 debug_rtx (insn);
3299 #endif
3301 return insn;
3304 /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
3306 static rtx
3307 make_jump_insn_raw (pattern)
3308 rtx pattern;
3310 rtx insn;
3312 insn = rtx_alloc (JUMP_INSN);
3313 INSN_UID (insn) = cur_insn_uid++;
3315 PATTERN (insn) = pattern;
3316 INSN_CODE (insn) = -1;
3317 LOG_LINKS (insn) = NULL;
3318 REG_NOTES (insn) = NULL;
3319 JUMP_LABEL (insn) = NULL;
3320 INSN_SCOPE (insn) = NULL;
3321 BLOCK_FOR_INSN (insn) = NULL;
3323 return insn;
3326 /* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
3328 static rtx
3329 make_call_insn_raw (pattern)
3330 rtx pattern;
3332 rtx insn;
3334 insn = rtx_alloc (CALL_INSN);
3335 INSN_UID (insn) = cur_insn_uid++;
3337 PATTERN (insn) = pattern;
3338 INSN_CODE (insn) = -1;
3339 LOG_LINKS (insn) = NULL;
3340 REG_NOTES (insn) = NULL;
3341 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
3342 INSN_SCOPE (insn) = NULL;
3343 BLOCK_FOR_INSN (insn) = NULL;
3345 return insn;
3348 /* Add INSN to the end of the doubly-linked list.
3349 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
3351 void
3352 add_insn (insn)
3353 rtx insn;
3355 PREV_INSN (insn) = last_insn;
3356 NEXT_INSN (insn) = 0;
3358 if (NULL != last_insn)
3359 NEXT_INSN (last_insn) = insn;
3361 if (NULL == first_insn)
3362 first_insn = insn;
3364 last_insn = insn;
3367 /* Add INSN into the doubly-linked list after insn AFTER. This and
3368 the next should be the only functions called to insert an insn once
3369 delay slots have been filled since only they know how to update a
3370 SEQUENCE. */
3372 void
3373 add_insn_after (insn, after)
3374 rtx insn, after;
3376 rtx next = NEXT_INSN (after);
3377 basic_block bb;
3379 if (optimize && INSN_DELETED_P (after))
3380 abort ();
3382 NEXT_INSN (insn) = next;
3383 PREV_INSN (insn) = after;
3385 if (next)
3387 PREV_INSN (next) = insn;
3388 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
3389 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
3391 else if (last_insn == after)
3392 last_insn = insn;
3393 else
3395 struct sequence_stack *stack = seq_stack;
3396 /* Scan all pending sequences too. */
3397 for (; stack; stack = stack->next)
3398 if (after == stack->last)
3400 stack->last = insn;
3401 break;
3404 if (stack == 0)
3405 abort ();
3408 if (GET_CODE (after) != BARRIER
3409 && GET_CODE (insn) != BARRIER
3410 && (bb = BLOCK_FOR_INSN (after)))
3412 set_block_for_insn (insn, bb);
3413 if (INSN_P (insn))
3414 bb->flags |= BB_DIRTY;
3415 /* Should not happen, as the first insn in the BB is always
3416 either a NOTE or a LABEL. */
3417 if (bb->end == after
3418 /* Avoid clobbering of structure when creating new BB. */
3419 && GET_CODE (insn) != BARRIER
3420 && (GET_CODE (insn) != NOTE
3421 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
3422 bb->end = insn;
3425 NEXT_INSN (after) = insn;
3426 if (GET_CODE (after) == INSN && GET_CODE (PATTERN (after)) == SEQUENCE)
3428 rtx sequence = PATTERN (after);
3429 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3433 /* Add INSN into the doubly-linked list before insn BEFORE. This and
3434 the previous should be the only functions called to insert an insn once
3435 delay slots have been filled since only they know how to update a
3436 SEQUENCE. */
3438 void
3439 add_insn_before (insn, before)
3440 rtx insn, before;
3442 rtx prev = PREV_INSN (before);
3443 basic_block bb;
3445 if (optimize && INSN_DELETED_P (before))
3446 abort ();
3448 PREV_INSN (insn) = prev;
3449 NEXT_INSN (insn) = before;
3451 if (prev)
3453 NEXT_INSN (prev) = insn;
3454 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
3456 rtx sequence = PATTERN (prev);
3457 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3460 else if (first_insn == before)
3461 first_insn = insn;
3462 else
3464 struct sequence_stack *stack = seq_stack;
3465 /* Scan all pending sequences too. */
3466 for (; stack; stack = stack->next)
3467 if (before == stack->first)
3469 stack->first = insn;
3470 break;
3473 if (stack == 0)
3474 abort ();
3477 if (GET_CODE (before) != BARRIER
3478 && GET_CODE (insn) != BARRIER
3479 && (bb = BLOCK_FOR_INSN (before)))
3481 set_block_for_insn (insn, bb);
3482 if (INSN_P (insn))
3483 bb->flags |= BB_DIRTY;
3484 /* Should not happen, as the first insn in the BB is always
3485 either a NOTE or a LABEL. */
3486 if (bb->head == insn
3487 /* Avoid clobbering of structure when creating new BB. */
3488 && GET_CODE (insn) != BARRIER
3489 && (GET_CODE (insn) != NOTE
3490 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
3491 abort ();
3494 PREV_INSN (before) = insn;
3495 if (GET_CODE (before) == INSN && GET_CODE (PATTERN (before)) == SEQUENCE)
3496 PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
3499 /* Remove an insn from its doubly-linked list. This function knows how
3500 to handle sequences. */
3501 void
3502 remove_insn (insn)
3503 rtx insn;
3505 rtx next = NEXT_INSN (insn);
3506 rtx prev = PREV_INSN (insn);
3507 basic_block bb;
3509 if (prev)
3511 NEXT_INSN (prev) = next;
3512 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
3514 rtx sequence = PATTERN (prev);
3515 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
3518 else if (first_insn == insn)
3519 first_insn = next;
3520 else
3522 struct sequence_stack *stack = seq_stack;
3523 /* Scan all pending sequences too. */
3524 for (; stack; stack = stack->next)
3525 if (insn == stack->first)
3527 stack->first = next;
3528 break;
3531 if (stack == 0)
3532 abort ();
3535 if (next)
3537 PREV_INSN (next) = prev;
3538 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
3539 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
3541 else if (last_insn == insn)
3542 last_insn = prev;
3543 else
3545 struct sequence_stack *stack = seq_stack;
3546 /* Scan all pending sequences too. */
3547 for (; stack; stack = stack->next)
3548 if (insn == stack->last)
3550 stack->last = prev;
3551 break;
3554 if (stack == 0)
3555 abort ();
3557 if (GET_CODE (insn) != BARRIER
3558 && (bb = BLOCK_FOR_INSN (insn)))
3560 if (INSN_P (insn))
3561 bb->flags |= BB_DIRTY;
3562 if (bb->head == insn)
3564 /* Never ever delete the basic block note without deleting the whole
3565 basic block. */
3566 if (GET_CODE (insn) == NOTE)
3567 abort ();
3568 bb->head = next;
3570 if (bb->end == insn)
3571 bb->end = prev;
3575 /* Delete all insns made since FROM.
3576 FROM becomes the new last instruction. */
3578 void
3579 delete_insns_since (from)
3580 rtx from;
3582 if (from == 0)
3583 first_insn = 0;
3584 else
3585 NEXT_INSN (from) = 0;
3586 last_insn = from;
3589 /* This function is deprecated; please use sequences instead.
3591 Move a consecutive bunch of insns to a different place in the chain.
3592 The insns to be moved are those between FROM and TO.
3593 They are moved to a new position after the insn AFTER.
3594 AFTER must not be FROM or TO or any insn in between.
3596 This function does not know about SEQUENCEs and hence should not be
3597 called after delay-slot filling has been done. */
3599 void
3600 reorder_insns_nobb (from, to, after)
3601 rtx from, to, after;
3603 /* Splice this bunch out of where it is now. */
3604 if (PREV_INSN (from))
3605 NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
3606 if (NEXT_INSN (to))
3607 PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
3608 if (last_insn == to)
3609 last_insn = PREV_INSN (from);
3610 if (first_insn == from)
3611 first_insn = NEXT_INSN (to);
3613 /* Make the new neighbors point to it and it to them. */
3614 if (NEXT_INSN (after))
3615 PREV_INSN (NEXT_INSN (after)) = to;
3617 NEXT_INSN (to) = NEXT_INSN (after);
3618 PREV_INSN (from) = after;
3619 NEXT_INSN (after) = from;
3620 if (after == last_insn)
3621 last_insn = to;
3624 /* Same as function above, but take care to update BB boundaries. */
3625 void
3626 reorder_insns (from, to, after)
3627 rtx from, to, after;
3629 rtx prev = PREV_INSN (from);
3630 basic_block bb, bb2;
3632 reorder_insns_nobb (from, to, after);
3634 if (GET_CODE (after) != BARRIER
3635 && (bb = BLOCK_FOR_INSN (after)))
3637 rtx x;
3638 bb->flags |= BB_DIRTY;
3640 if (GET_CODE (from) != BARRIER
3641 && (bb2 = BLOCK_FOR_INSN (from)))
3643 if (bb2->end == to)
3644 bb2->end = prev;
3645 bb2->flags |= BB_DIRTY;
3648 if (bb->end == after)
3649 bb->end = to;
3651 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
3652 set_block_for_insn (x, bb);
3656 /* Return the line note insn preceding INSN. */
3658 static rtx
3659 find_line_note (insn)
3660 rtx insn;
3662 if (no_line_numbers)
3663 return 0;
3665 for (; insn; insn = PREV_INSN (insn))
3666 if (GET_CODE (insn) == NOTE
3667 && NOTE_LINE_NUMBER (insn) >= 0)
3668 break;
3670 return insn;
3673 /* Like reorder_insns, but inserts line notes to preserve the line numbers
3674 of the moved insns when debugging. This may insert a note between AFTER
3675 and FROM, and another one after TO. */
3677 void
3678 reorder_insns_with_line_notes (from, to, after)
3679 rtx from, to, after;
3681 rtx from_line = find_line_note (from);
3682 rtx after_line = find_line_note (after);
3684 reorder_insns (from, to, after);
3686 if (from_line == after_line)
3687 return;
3689 if (from_line)
3690 emit_line_note_after (NOTE_SOURCE_FILE (from_line),
3691 NOTE_LINE_NUMBER (from_line),
3692 after);
3693 if (after_line)
3694 emit_line_note_after (NOTE_SOURCE_FILE (after_line),
3695 NOTE_LINE_NUMBER (after_line),
3696 to);
3699 /* Remove unnecessary notes from the instruction stream. */
3701 void
3702 remove_unnecessary_notes ()
3704 rtx block_stack = NULL_RTX;
3705 rtx eh_stack = NULL_RTX;
3706 rtx insn;
3707 rtx next;
3708 rtx tmp;
3710 /* We must not remove the first instruction in the function because
3711 the compiler depends on the first instruction being a note. */
3712 for (insn = NEXT_INSN (get_insns ()); insn; insn = next)
3714 /* Remember what's next. */
3715 next = NEXT_INSN (insn);
3717 /* We're only interested in notes. */
3718 if (GET_CODE (insn) != NOTE)
3719 continue;
3721 switch (NOTE_LINE_NUMBER (insn))
3723 case NOTE_INSN_DELETED:
3724 case NOTE_INSN_LOOP_END_TOP_COND:
3725 remove_insn (insn);
3726 break;
3728 case NOTE_INSN_EH_REGION_BEG:
3729 eh_stack = alloc_INSN_LIST (insn, eh_stack);
3730 break;
3732 case NOTE_INSN_EH_REGION_END:
3733 /* Too many end notes. */
3734 if (eh_stack == NULL_RTX)
3735 abort ();
3736 /* Mismatched nesting. */
3737 if (NOTE_EH_HANDLER (XEXP (eh_stack, 0)) != NOTE_EH_HANDLER (insn))
3738 abort ();
3739 tmp = eh_stack;
3740 eh_stack = XEXP (eh_stack, 1);
3741 free_INSN_LIST_node (tmp);
3742 break;
3744 case NOTE_INSN_BLOCK_BEG:
3745 /* By now, all notes indicating lexical blocks should have
3746 NOTE_BLOCK filled in. */
3747 if (NOTE_BLOCK (insn) == NULL_TREE)
3748 abort ();
3749 block_stack = alloc_INSN_LIST (insn, block_stack);
3750 break;
3752 case NOTE_INSN_BLOCK_END:
3753 /* Too many end notes. */
3754 if (block_stack == NULL_RTX)
3755 abort ();
3756 /* Mismatched nesting. */
3757 if (NOTE_BLOCK (XEXP (block_stack, 0)) != NOTE_BLOCK (insn))
3758 abort ();
3759 tmp = block_stack;
3760 block_stack = XEXP (block_stack, 1);
3761 free_INSN_LIST_node (tmp);
3763 /* Scan back to see if there are any non-note instructions
3764 between INSN and the beginning of this block. If not,
3765 then there is no PC range in the generated code that will
3766 actually be in this block, so there's no point in
3767 remembering the existence of the block. */
3768 for (tmp = PREV_INSN (insn); tmp; tmp = PREV_INSN (tmp))
3770 /* This block contains a real instruction. Note that we
3771 don't include labels; if the only thing in the block
3772 is a label, then there are still no PC values that
3773 lie within the block. */
3774 if (INSN_P (tmp))
3775 break;
3777 /* We're only interested in NOTEs. */
3778 if (GET_CODE (tmp) != NOTE)
3779 continue;
3781 if (NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BLOCK_BEG)
3783 /* We just verified that this BLOCK matches us with
3784 the block_stack check above. Never delete the
3785 BLOCK for the outermost scope of the function; we
3786 can refer to names from that scope even if the
3787 block notes are messed up. */
3788 if (! is_body_block (NOTE_BLOCK (insn))
3789 && (*debug_hooks->ignore_block) (NOTE_BLOCK (insn)))
3791 remove_insn (tmp);
3792 remove_insn (insn);
3794 break;
3796 else if (NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BLOCK_END)
3797 /* There's a nested block. We need to leave the
3798 current block in place since otherwise the debugger
3799 wouldn't be able to show symbols from our block in
3800 the nested block. */
3801 break;
3806 /* Too many begin notes. */
3807 if (block_stack || eh_stack)
3808 abort ();
3812 /* Emit insn(s) of given code and pattern
3813 at a specified place within the doubly-linked list.
3815 All of the emit_foo global entry points accept an object
3816 X which is either an insn list or a PATTERN of a single
3817 instruction.
3819 There are thus a few canonical ways to generate code and
3820 emit it at a specific place in the instruction stream. For
3821 example, consider the instruction named SPOT and the fact that
3822 we would like to emit some instructions before SPOT. We might
3823 do it like this:
3825 start_sequence ();
3826 ... emit the new instructions ...
3827 insns_head = get_insns ();
3828 end_sequence ();
3830 emit_insn_before (insns_head, SPOT);
3832 It used to be common to generate SEQUENCE rtl instead, but that
3833 is a relic of the past which no longer occurs. The reason is that
3834 SEQUENCE rtl results in badly fragmented RTL memory since the SEQUENCE
3835 generated would almost certainly die right after it was created. */
3837 /* Make X be output before the instruction BEFORE. */
3840 emit_insn_before (x, before)
3841 rtx x, before;
3843 rtx last = before;
3844 rtx insn;
3846 #ifdef ENABLE_RTL_CHECKING
3847 if (before == NULL_RTX)
3848 abort ();
3849 #endif
3851 if (x == NULL_RTX)
3852 return last;
3854 switch (GET_CODE (x))
3856 case INSN:
3857 case JUMP_INSN:
3858 case CALL_INSN:
3859 case CODE_LABEL:
3860 case BARRIER:
3861 case NOTE:
3862 insn = x;
3863 while (insn)
3865 rtx next = NEXT_INSN (insn);
3866 add_insn_before (insn, before);
3867 last = insn;
3868 insn = next;
3870 break;
3872 #ifdef ENABLE_RTL_CHECKING
3873 case SEQUENCE:
3874 abort ();
3875 break;
3876 #endif
3878 default:
3879 last = make_insn_raw (x);
3880 add_insn_before (last, before);
3881 break;
3884 return last;
3887 /* Make an instruction with body X and code JUMP_INSN
3888 and output it before the instruction BEFORE. */
3891 emit_jump_insn_before (x, before)
3892 rtx x, before;
3894 rtx insn, last;
3896 #ifdef ENABLE_RTL_CHECKING
3897 if (before == NULL_RTX)
3898 abort ();
3899 #endif
3901 switch (GET_CODE (x))
3903 case INSN:
3904 case JUMP_INSN:
3905 case CALL_INSN:
3906 case CODE_LABEL:
3907 case BARRIER:
3908 case NOTE:
3909 insn = x;
3910 while (insn)
3912 rtx next = NEXT_INSN (insn);
3913 add_insn_before (insn, before);
3914 last = insn;
3915 insn = next;
3917 break;
3919 #ifdef ENABLE_RTL_CHECKING
3920 case SEQUENCE:
3921 abort ();
3922 break;
3923 #endif
3925 default:
3926 last = make_jump_insn_raw (x);
3927 add_insn_before (last, before);
3928 break;
3931 return last;
3934 /* Make an instruction with body X and code CALL_INSN
3935 and output it before the instruction BEFORE. */
3938 emit_call_insn_before (x, before)
3939 rtx x, before;
3941 rtx last, insn;
3943 #ifdef ENABLE_RTL_CHECKING
3944 if (before == NULL_RTX)
3945 abort ();
3946 #endif
3948 switch (GET_CODE (x))
3950 case INSN:
3951 case JUMP_INSN:
3952 case CALL_INSN:
3953 case CODE_LABEL:
3954 case BARRIER:
3955 case NOTE:
3956 insn = x;
3957 while (insn)
3959 rtx next = NEXT_INSN (insn);
3960 add_insn_before (insn, before);
3961 last = insn;
3962 insn = next;
3964 break;
3966 #ifdef ENABLE_RTL_CHECKING
3967 case SEQUENCE:
3968 abort ();
3969 break;
3970 #endif
3972 default:
3973 last = make_call_insn_raw (x);
3974 add_insn_before (last, before);
3975 break;
3978 return last;
3981 /* Make an insn of code BARRIER
3982 and output it before the insn BEFORE. */
3985 emit_barrier_before (before)
3986 rtx before;
3988 rtx insn = rtx_alloc (BARRIER);
3990 INSN_UID (insn) = cur_insn_uid++;
3992 add_insn_before (insn, before);
3993 return insn;
3996 /* Emit the label LABEL before the insn BEFORE. */
3999 emit_label_before (label, before)
4000 rtx label, before;
4002 /* This can be called twice for the same label as a result of the
4003 confusion that follows a syntax error! So make it harmless. */
4004 if (INSN_UID (label) == 0)
4006 INSN_UID (label) = cur_insn_uid++;
4007 add_insn_before (label, before);
4010 return label;
4013 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4016 emit_note_before (subtype, before)
4017 int subtype;
4018 rtx before;
4020 rtx note = rtx_alloc (NOTE);
4021 INSN_UID (note) = cur_insn_uid++;
4022 NOTE_SOURCE_FILE (note) = 0;
4023 NOTE_LINE_NUMBER (note) = subtype;
4024 BLOCK_FOR_INSN (note) = NULL;
4026 add_insn_before (note, before);
4027 return note;
4030 /* Helper for emit_insn_after, handles lists of instructions
4031 efficiently. */
4033 static rtx emit_insn_after_1 PARAMS ((rtx, rtx));
4035 static rtx
4036 emit_insn_after_1 (first, after)
4037 rtx first, after;
4039 rtx last;
4040 rtx after_after;
4041 basic_block bb;
4043 if (GET_CODE (after) != BARRIER
4044 && (bb = BLOCK_FOR_INSN (after)))
4046 bb->flags |= BB_DIRTY;
4047 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4048 if (GET_CODE (last) != BARRIER)
4049 set_block_for_insn (last, bb);
4050 if (GET_CODE (last) != BARRIER)
4051 set_block_for_insn (last, bb);
4052 if (bb->end == after)
4053 bb->end = last;
4055 else
4056 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4057 continue;
4059 after_after = NEXT_INSN (after);
4061 NEXT_INSN (after) = first;
4062 PREV_INSN (first) = after;
4063 NEXT_INSN (last) = after_after;
4064 if (after_after)
4065 PREV_INSN (after_after) = last;
4067 if (after == last_insn)
4068 last_insn = last;
4069 return last;
4072 /* Make X be output after the insn AFTER. */
4075 emit_insn_after (x, after)
4076 rtx x, after;
4078 rtx last = after;
4080 #ifdef ENABLE_RTL_CHECKING
4081 if (after == NULL_RTX)
4082 abort ();
4083 #endif
4085 if (x == NULL_RTX)
4086 return last;
4088 switch (GET_CODE (x))
4090 case INSN:
4091 case JUMP_INSN:
4092 case CALL_INSN:
4093 case CODE_LABEL:
4094 case BARRIER:
4095 case NOTE:
4096 last = emit_insn_after_1 (x, after);
4097 break;
4099 #ifdef ENABLE_RTL_CHECKING
4100 case SEQUENCE:
4101 abort ();
4102 break;
4103 #endif
4105 default:
4106 last = make_insn_raw (x);
4107 add_insn_after (last, after);
4108 break;
4111 return last;
4114 /* Similar to emit_insn_after, except that line notes are to be inserted so
4115 as to act as if this insn were at FROM. */
4117 void
4118 emit_insn_after_with_line_notes (x, after, from)
4119 rtx x, after, from;
4121 rtx from_line = find_line_note (from);
4122 rtx after_line = find_line_note (after);
4123 rtx insn = emit_insn_after (x, after);
4125 if (from_line)
4126 emit_line_note_after (NOTE_SOURCE_FILE (from_line),
4127 NOTE_LINE_NUMBER (from_line),
4128 after);
4130 if (after_line)
4131 emit_line_note_after (NOTE_SOURCE_FILE (after_line),
4132 NOTE_LINE_NUMBER (after_line),
4133 insn);
4136 /* Make an insn of code JUMP_INSN with body X
4137 and output it after the insn AFTER. */
4140 emit_jump_insn_after (x, after)
4141 rtx x, after;
4143 rtx last;
4145 #ifdef ENABLE_RTL_CHECKING
4146 if (after == NULL_RTX)
4147 abort ();
4148 #endif
4150 switch (GET_CODE (x))
4152 case INSN:
4153 case JUMP_INSN:
4154 case CALL_INSN:
4155 case CODE_LABEL:
4156 case BARRIER:
4157 case NOTE:
4158 last = emit_insn_after_1 (x, after);
4159 break;
4161 #ifdef ENABLE_RTL_CHECKING
4162 case SEQUENCE:
4163 abort ();
4164 break;
4165 #endif
4167 default:
4168 last = make_jump_insn_raw (x);
4169 add_insn_after (last, after);
4170 break;
4173 return last;
4176 /* Make an instruction with body X and code CALL_INSN
4177 and output it after the instruction AFTER. */
4180 emit_call_insn_after (x, after)
4181 rtx x, after;
4183 rtx last;
4185 #ifdef ENABLE_RTL_CHECKING
4186 if (after == NULL_RTX)
4187 abort ();
4188 #endif
4190 switch (GET_CODE (x))
4192 case INSN:
4193 case JUMP_INSN:
4194 case CALL_INSN:
4195 case CODE_LABEL:
4196 case BARRIER:
4197 case NOTE:
4198 last = emit_insn_after_1 (x, after);
4199 break;
4201 #ifdef ENABLE_RTL_CHECKING
4202 case SEQUENCE:
4203 abort ();
4204 break;
4205 #endif
4207 default:
4208 last = make_call_insn_raw (x);
4209 add_insn_after (last, after);
4210 break;
4213 return last;
4216 /* Make an insn of code BARRIER
4217 and output it after the insn AFTER. */
4220 emit_barrier_after (after)
4221 rtx after;
4223 rtx insn = rtx_alloc (BARRIER);
4225 INSN_UID (insn) = cur_insn_uid++;
4227 add_insn_after (insn, after);
4228 return insn;
4231 /* Emit the label LABEL after the insn AFTER. */
4234 emit_label_after (label, after)
4235 rtx label, after;
4237 /* This can be called twice for the same label
4238 as a result of the confusion that follows a syntax error!
4239 So make it harmless. */
4240 if (INSN_UID (label) == 0)
4242 INSN_UID (label) = cur_insn_uid++;
4243 add_insn_after (label, after);
4246 return label;
4249 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
4252 emit_note_after (subtype, after)
4253 int subtype;
4254 rtx after;
4256 rtx note = rtx_alloc (NOTE);
4257 INSN_UID (note) = cur_insn_uid++;
4258 NOTE_SOURCE_FILE (note) = 0;
4259 NOTE_LINE_NUMBER (note) = subtype;
4260 BLOCK_FOR_INSN (note) = NULL;
4261 add_insn_after (note, after);
4262 return note;
4265 /* Emit a line note for FILE and LINE after the insn AFTER. */
4268 emit_line_note_after (file, line, after)
4269 const char *file;
4270 int line;
4271 rtx after;
4273 rtx note;
4275 if (no_line_numbers && line > 0)
4277 cur_insn_uid++;
4278 return 0;
4281 note = rtx_alloc (NOTE);
4282 INSN_UID (note) = cur_insn_uid++;
4283 NOTE_SOURCE_FILE (note) = file;
4284 NOTE_LINE_NUMBER (note) = line;
4285 BLOCK_FOR_INSN (note) = NULL;
4286 add_insn_after (note, after);
4287 return note;
4290 /* Like emit_insn_after, but set INSN_SCOPE according to SCOPE. */
4292 emit_insn_after_scope (pattern, after, scope)
4293 rtx pattern, after;
4294 tree scope;
4296 rtx last = emit_insn_after (pattern, after);
4298 after = NEXT_INSN (after);
4299 while (1)
4301 INSN_SCOPE (after) = scope;
4302 if (after == last)
4303 break;
4304 after = NEXT_INSN (after);
4306 return last;
4309 /* Like emit_jump_insn_after, but set INSN_SCOPE according to SCOPE. */
4311 emit_jump_insn_after_scope (pattern, after, scope)
4312 rtx pattern, after;
4313 tree scope;
4315 rtx last = emit_jump_insn_after (pattern, after);
4317 after = NEXT_INSN (after);
4318 while (1)
4320 INSN_SCOPE (after) = scope;
4321 if (after == last)
4322 break;
4323 after = NEXT_INSN (after);
4325 return last;
4328 /* Like emit_call_insn_after, but set INSN_SCOPE according to SCOPE. */
4330 emit_call_insn_after_scope (pattern, after, scope)
4331 rtx pattern, after;
4332 tree scope;
4334 rtx last = emit_call_insn_after (pattern, after);
4336 after = NEXT_INSN (after);
4337 while (1)
4339 INSN_SCOPE (after) = scope;
4340 if (after == last)
4341 break;
4342 after = NEXT_INSN (after);
4344 return last;
4347 /* Like emit_insn_before, but set INSN_SCOPE according to SCOPE. */
4349 emit_insn_before_scope (pattern, before, scope)
4350 rtx pattern, before;
4351 tree scope;
4353 rtx first = PREV_INSN (before);
4354 rtx last = emit_insn_before (pattern, before);
4356 first = NEXT_INSN (first);
4357 while (1)
4359 INSN_SCOPE (first) = scope;
4360 if (first == last)
4361 break;
4362 first = NEXT_INSN (first);
4364 return last;
4367 /* Take X and emit it at the end of the doubly-linked
4368 INSN list.
4370 Returns the last insn emitted. */
4373 emit_insn (x)
4374 rtx x;
4376 rtx last = last_insn;
4377 rtx insn;
4379 if (x == NULL_RTX)
4380 return last;
4382 switch (GET_CODE (x))
4384 case INSN:
4385 case JUMP_INSN:
4386 case CALL_INSN:
4387 case CODE_LABEL:
4388 case BARRIER:
4389 case NOTE:
4390 insn = x;
4391 while (insn)
4393 rtx next = NEXT_INSN (insn);
4394 add_insn (insn);
4395 last = insn;
4396 insn = next;
4398 break;
4400 #ifdef ENABLE_RTL_CHECKING
4401 case SEQUENCE:
4402 abort ();
4403 break;
4404 #endif
4406 default:
4407 last = make_insn_raw (x);
4408 add_insn (last);
4409 break;
4412 return last;
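/* For example, to copy one pseudo into another, a caller might build
   a SET pattern and hand it to emit_insn, which wraps it in an INSN
   and chains it:

       rtx dest = gen_reg_rtx (SImode);
       rtx src = gen_reg_rtx (SImode);
       emit_insn (gen_rtx_SET (VOIDmode, dest, src));

   A sketch only; most callers obtain their patterns from the gen_*
   routines generated from the machine description.  */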
4415 /* Make an insn of code JUMP_INSN with pattern X
4416 and add it to the end of the doubly-linked list. */
4419 emit_jump_insn (x)
4420 rtx x;
4422 rtx last, insn;
4424 switch (GET_CODE (x))
4426 case INSN:
4427 case JUMP_INSN:
4428 case CALL_INSN:
4429 case CODE_LABEL:
4430 case BARRIER:
4431 case NOTE:
4432 insn = x;
4433 while (insn)
4435 rtx next = NEXT_INSN (insn);
4436 add_insn (insn);
4437 last = insn;
4438 insn = next;
4440 break;
4442 #ifdef ENABLE_RTL_CHECKING
4443 case SEQUENCE:
4444 abort ();
4445 break;
4446 #endif
4448 default:
4449 last = make_jump_insn_raw (x);
4450 add_insn (last);
4451 break;
4454 return last;
4457 /* Make an insn of code CALL_INSN with pattern X
4458 and add it to the end of the doubly-linked list. */
4461 emit_call_insn (x)
4462 rtx x;
4464 rtx insn;
4466 switch (GET_CODE (x))
4468 case INSN:
4469 case JUMP_INSN:
4470 case CALL_INSN:
4471 case CODE_LABEL:
4472 case BARRIER:
4473 case NOTE:
4474 insn = emit_insn (x);
4475 break;
4477 #ifdef ENABLE_RTL_CHECKING
4478 case SEQUENCE:
4479 abort ();
4480 break;
4481 #endif
4483 default:
4484 insn = make_call_insn_raw (x);
4485 add_insn (insn);
4486 break;
4489 return insn;
4492 /* Add the label LABEL to the end of the doubly-linked list. */
4495 emit_label (label)
4496 rtx label;
4498 /* This can be called twice for the same label
4499 as a result of the confusion that follows a syntax error!
4500 So make it harmless. */
4501 if (INSN_UID (label) == 0)
4503 INSN_UID (label) = cur_insn_uid++;
4504 add_insn (label);
4506 return label;
4509 /* Make an insn of code BARRIER
4510 and add it to the end of the doubly-linked list. */
4513 emit_barrier ()
4515 rtx barrier = rtx_alloc (BARRIER);
4516 INSN_UID (barrier) = cur_insn_uid++;
4517 add_insn (barrier);
4518 return barrier;
4521 /* Make an insn of code NOTE
4522 with data-fields specified by FILE and LINE
4523 and add it to the end of the doubly-linked list,
4524 but only if line-numbers are desired for debugging info. */
4527 emit_line_note (file, line)
4528 const char *file;
4529 int line;
4531 set_file_and_line_for_stmt (file, line);
4533 #if 0
4534 if (no_line_numbers)
4535 return 0;
4536 #endif
4538 return emit_note (file, line);
4541 /* Make an insn of code NOTE
4542 with data-fields specified by FILE and LINE
4543 and add it to the end of the doubly-linked list.
4544 If it is a line-number NOTE, omit it if it matches the previous one. */
4547 emit_note (file, line)
4548 const char *file;
4549 int line;
4551 rtx note;
4553 if (line > 0)
4555 if (file && last_filename && !strcmp (file, last_filename)
4556 && line == last_linenum)
4557 return 0;
4558 last_filename = file;
4559 last_linenum = line;
4562 if (no_line_numbers && line > 0)
4564 cur_insn_uid++;
4565 return 0;
4568 note = rtx_alloc (NOTE);
4569 INSN_UID (note) = cur_insn_uid++;
4570 NOTE_SOURCE_FILE (note) = file;
4571 NOTE_LINE_NUMBER (note) = line;
4572 BLOCK_FOR_INSN (note) = NULL;
4573 add_insn (note);
4574 return note;
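/* A sketch of the de-duplication above, with line numbers enabled:

       emit_note ("foo.c", 10);     allocates and returns a NOTE
       emit_note ("foo.c", 10);     returns 0: file and line match
                                    last_filename/last_linenum

   Notes with LINE <= 0 (the NOTE_INSN_* subtypes) are never
   suppressed this way.  */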
4577 /* Emit a line NOTE, and don't omit it even if LINE matches the previous note. */
4580 emit_line_note_force (file, line)
4581 const char *file;
4582 int line;
4584 last_linenum = -1;
4585 return emit_line_note (file, line);
4588 /* Cause the next statement to emit a line note even if the line number
4589 has not changed. This is used at the beginning of a function. */
4591 void
4592 force_next_line_note ()
4594 last_linenum = -1;
4597 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
4598 note of this type already exists, remove it first. */
4601 set_unique_reg_note (insn, kind, datum)
4602 rtx insn;
4603 enum reg_note kind;
4604 rtx datum;
4606 rtx note = find_reg_note (insn, kind, NULL_RTX);
4608 switch (kind)
4610 case REG_EQUAL:
4611 case REG_EQUIV:
4612 /* Don't add REG_EQUAL/REG_EQUIV notes if the insn
4613 has multiple sets (some callers assume single_set
4614 means the insn only has one set, when in fact it
4615 means the insn only has one *useful* set). */
4616 if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
4618 if (note)
4619 abort ();
4620 return NULL_RTX;
4623 /* Don't add ASM_OPERANDS REG_EQUAL/REG_EQUIV notes.
4624 Such a note serves no useful purpose and breaks eliminate_regs. */
4625 if (GET_CODE (datum) == ASM_OPERANDS)
4626 return NULL_RTX;
4627 break;
4629 default:
4630 break;
4633 if (note)
4635 XEXP (note, 0) = datum;
4636 return note;
4639 REG_NOTES (insn) = gen_rtx_EXPR_LIST (kind, datum, REG_NOTES (insn));
4640 return REG_NOTES (insn);
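/* For instance, a pass that knows the value computed by INSN might
   record it with

       set_unique_reg_note (insn, REG_EQUAL, GEN_INT (42));

   A later call with kind REG_EQUAL on the same insn replaces the
   datum in the existing note instead of adding a second note.  */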
4643 /* Return an indication of which type of insn should have X as a body.
4644 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
4646 enum rtx_code
4647 classify_insn (x)
4648 rtx x;
4650 if (GET_CODE (x) == CODE_LABEL)
4651 return CODE_LABEL;
4652 if (GET_CODE (x) == CALL)
4653 return CALL_INSN;
4654 if (GET_CODE (x) == RETURN)
4655 return JUMP_INSN;
4656 if (GET_CODE (x) == SET)
4658 if (SET_DEST (x) == pc_rtx)
4659 return JUMP_INSN;
4660 else if (GET_CODE (SET_SRC (x)) == CALL)
4661 return CALL_INSN;
4662 else
4663 return INSN;
4665 if (GET_CODE (x) == PARALLEL)
4667 int j;
4668 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
4669 if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
4670 return CALL_INSN;
4671 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
4672 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
4673 return JUMP_INSN;
4674 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
4675 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
4676 return CALL_INSN;
4678 return INSN;
4681 /* Emit the rtl pattern X as an appropriate kind of insn.
4682 If X is a label, it is simply added into the insn chain. */
4685 emit (x)
4686 rtx x;
4688 enum rtx_code code = classify_insn (x);
4690 if (code == CODE_LABEL)
4691 return emit_label (x);
4692 else if (code == INSN)
4693 return emit_insn (x);
4694 else if (code == JUMP_INSN)
4696 rtx insn = emit_jump_insn (x);
4697 if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
4698 return emit_barrier ();
4699 return insn;
4701 else if (code == CALL_INSN)
4702 return emit_call_insn (x);
4703 else
4704 abort ();
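/* A rough illustration of the dispatch: a SET whose destination is
   the pc classifies as a jump, so

       emit (gen_rtx_SET (VOIDmode, pc_rtx,
                          gen_rtx_LABEL_REF (VOIDmode, label)));

   behaves like emit_jump_insn and, the jump being unconditional,
   also emits the trailing barrier.  (LABEL here is assumed to be
   some CODE_LABEL already in the chain.)  */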
4707 /* Space for free sequence stack entries. */
4708 static GTY ((deletable (""))) struct sequence_stack *free_sequence_stack;
4710 /* Begin emitting insns to a sequence which can be packaged in an
4711 RTL_EXPR. If this sequence will contain something that might cause
4712 the compiler to pop arguments to function calls (because those
4713 pops have previously been deferred; see INHIBIT_DEFER_POP for more
4714 details), use do_pending_stack_adjust before calling this function.
4715 That will ensure that the deferred pops are not accidentally
4716 emitted in the middle of this sequence. */
4718 void
4719 start_sequence ()
4721 struct sequence_stack *tem;
4723 if (free_sequence_stack != NULL)
4725 tem = free_sequence_stack;
4726 free_sequence_stack = tem->next;
4728 else
4729 tem = (struct sequence_stack *) ggc_alloc (sizeof (struct sequence_stack));
4731 tem->next = seq_stack;
4732 tem->first = first_insn;
4733 tem->last = last_insn;
4734 tem->sequence_rtl_expr = seq_rtl_expr;
4736 seq_stack = tem;
4738 first_insn = 0;
4739 last_insn = 0;
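/* The usual pattern, roughly: generate insns on the side, detach them,
   then splice them in wherever they belong.

       start_sequence ();
       emit_insn (...);             any number of emissions
       seq = get_insns ();          must come before end_sequence
       end_sequence ();
       emit_insn (seq);             splice into the real chain

   get_insns (defined elsewhere in this file) returns the first insn
   of the current, here nested, chain.  */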
4742 /* Similarly, but indicate that this sequence will be placed in T, an
4743 RTL_EXPR. See the documentation for start_sequence for more
4744 information about how to use this function. */
4746 void
4747 start_sequence_for_rtl_expr (t)
4748 tree t;
4750 start_sequence ();
4752 seq_rtl_expr = t;
4755 /* Set up the insn chain starting with FIRST as the current sequence,
4756 saving the previously current one. See the documentation for
4757 start_sequence for more information about how to use this function. */
4759 void
4760 push_to_sequence (first)
4761 rtx first;
4763 rtx last;
4765 start_sequence ();
4767 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last));
4769 first_insn = first;
4770 last_insn = last;
4773 /* Set up the insn chain from a chain starting in FIRST and ending in LAST. */
4775 void
4776 push_to_full_sequence (first, last)
4777 rtx first, last;
4779 start_sequence ();
4780 first_insn = first;
4781 last_insn = last;
4782 /* We really should have the end of the insn chain here. */
4783 if (last && NEXT_INSN (last))
4784 abort ();
4787 /* Set up the outer-level insn chain
4788 as the current sequence, saving the previously current one. */
4790 void
4791 push_topmost_sequence ()
4793 struct sequence_stack *stack, *top = NULL;
4795 start_sequence ();
4797 for (stack = seq_stack; stack; stack = stack->next)
4798 top = stack;
4800 first_insn = top->first;
4801 last_insn = top->last;
4802 seq_rtl_expr = top->sequence_rtl_expr;
4805 /* After emitting to the outer-level insn chain, update the outer-level
4806 insn chain, and restore the previous saved state. */
4808 void
4809 pop_topmost_sequence ()
4811 struct sequence_stack *stack, *top = NULL;
4813 for (stack = seq_stack; stack; stack = stack->next)
4814 top = stack;
4816 top->first = first_insn;
4817 top->last = last_insn;
4818 /* ??? Why don't we save seq_rtl_expr here? */
4820 end_sequence ();
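/* These two bracket code that must emit into the function's real
   (outer-level) insn chain while a nested sequence is in progress:

       push_topmost_sequence ();
       emit_insn (pat);             lands at the end of the outer chain
       pop_topmost_sequence ();

   A sketch only; PAT stands for whatever pattern the caller needs.  */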
4823 /* After emitting to a sequence, restore previous saved state.
4825 To get the contents of the sequence just made, you must call
4826 `get_insns' *before* calling here.
4828 If the compiler might have deferred popping arguments while
4829 generating this sequence, and this sequence will not be immediately
4830 inserted into the instruction stream, use do_pending_stack_adjust
4831 before calling get_insns. That will ensure that the deferred
4832 pops are inserted into this sequence, and not into some random
4833 location in the instruction stream. See INHIBIT_DEFER_POP for more
4834 information about deferred popping of arguments. */
4836 void
4837 end_sequence ()
4839 struct sequence_stack *tem = seq_stack;
4841 first_insn = tem->first;
4842 last_insn = tem->last;
4843 seq_rtl_expr = tem->sequence_rtl_expr;
4844 seq_stack = tem->next;
4846 memset (tem, 0, sizeof (*tem));
4847 tem->next = free_sequence_stack;
4848 free_sequence_stack = tem;
4851 /* This works like end_sequence, but records the insns of the sequence
4852 that was just ended in *FIRST and *LAST. */
4854 void
4855 end_full_sequence (first, last)
4856 rtx *first, *last;
4858 *first = first_insn;
4859 *last = last_insn;
4860 end_sequence ();
4863 /* Return 1 if currently emitting into a sequence. */
4866 in_sequence_p ()
4868 return seq_stack != 0;
4871 /* Put the various virtual registers into REGNO_REG_RTX. */
4873 void
4874 init_virtual_regs (es)
4875 struct emit_status *es;
4877 rtx *ptr = es->x_regno_reg_rtx;
4878 ptr[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
4879 ptr[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
4880 ptr[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
4881 ptr[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
4882 ptr[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
4886 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
4887 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
4888 static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
4889 static int copy_insn_n_scratches;
4891 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
4892 copied an ASM_OPERANDS.
4893 In that case, it is the original input-operand vector. */
4894 static rtvec orig_asm_operands_vector;
4896 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
4897 copied an ASM_OPERANDS.
4898 In that case, it is the copied input-operand vector. */
4899 static rtvec copy_asm_operands_vector;
4901 /* Likewise for the constraints vector. */
4902 static rtvec orig_asm_constraints_vector;
4903 static rtvec copy_asm_constraints_vector;
4905 /* Recursively create a new copy of an rtx for copy_insn.
4906 This function differs from copy_rtx in that it handles SCRATCHes and
4907 ASM_OPERANDs properly.
4908 Normally, this function is not used directly; use copy_insn as the front end.
4909 However, you could first copy an insn pattern with copy_insn and then use
4910 this function afterwards to properly copy any REG_NOTEs containing
4911 SCRATCHes. */
4914 copy_insn_1 (orig)
4915 rtx orig;
4917 rtx copy;
4918 int i, j;
4919 RTX_CODE code;
4920 const char *format_ptr;
4922 code = GET_CODE (orig);
4924 switch (code)
4926 case REG:
4927 case QUEUED:
4928 case CONST_INT:
4929 case CONST_DOUBLE:
4930 case CONST_VECTOR:
4931 case SYMBOL_REF:
4932 case CODE_LABEL:
4933 case PC:
4934 case CC0:
4935 case ADDRESSOF:
4936 return orig;
4938 case SCRATCH:
4939 for (i = 0; i < copy_insn_n_scratches; i++)
4940 if (copy_insn_scratch_in[i] == orig)
4941 return copy_insn_scratch_out[i];
4942 break;
4944 case CONST:
4945 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
4946 a LABEL_REF, it isn't sharable. */
4947 if (GET_CODE (XEXP (orig, 0)) == PLUS
4948 && GET_CODE (XEXP (XEXP (orig, 0), 0)) == SYMBOL_REF
4949 && GET_CODE (XEXP (XEXP (orig, 0), 1)) == CONST_INT)
4950 return orig;
4951 break;
4953 /* A MEM with a constant address is not sharable. The problem is that
4954 the constant address may need to be reloaded. If the mem is shared,
4955 then reloading one copy of this mem will cause all copies to appear
4956 to have been reloaded. */
4958 default:
4959 break;
4962 copy = rtx_alloc (code);
4964 /* Copy the various flags, and other information. We assume that
4965 all fields need copying, and then clear the fields that should
4966 not be copied. That is the sensible default behavior, and forces
4967 us to explicitly document why we are *not* copying a flag. */
4968 memcpy (copy, orig, sizeof (struct rtx_def) - sizeof (rtunion));
4970 /* We do not copy the USED flag, which is used as a mark bit during
4971 walks over the RTL. */
4972 RTX_FLAG (copy, used) = 0;
4974 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
4975 if (GET_RTX_CLASS (code) == 'i')
4977 RTX_FLAG (copy, jump) = 0;
4978 RTX_FLAG (copy, call) = 0;
4979 RTX_FLAG (copy, frame_related) = 0;
4982 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
4984 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
4986 copy->fld[i] = orig->fld[i];
4987 switch (*format_ptr++)
4989 case 'e':
4990 if (XEXP (orig, i) != NULL)
4991 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
4992 break;
4994 case 'E':
4995 case 'V':
4996 if (XVEC (orig, i) == orig_asm_constraints_vector)
4997 XVEC (copy, i) = copy_asm_constraints_vector;
4998 else if (XVEC (orig, i) == orig_asm_operands_vector)
4999 XVEC (copy, i) = copy_asm_operands_vector;
5000 else if (XVEC (orig, i) != NULL)
5002 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
5003 for (j = 0; j < XVECLEN (copy, i); j++)
5004 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
5006 break;
5008 case 't':
5009 case 'w':
5010 case 'i':
5011 case 's':
5012 case 'S':
5013 case 'u':
5014 case '0':
5015 /* These are left unchanged. */
5016 break;
5018 default:
5019 abort ();
5023 if (code == SCRATCH)
5025 i = copy_insn_n_scratches++;
5026 if (i >= MAX_RECOG_OPERANDS)
5027 abort ();
5028 copy_insn_scratch_in[i] = orig;
5029 copy_insn_scratch_out[i] = copy;
5031 else if (code == ASM_OPERANDS)
5033 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
5034 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
5035 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
5036 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
5039 return copy;
5042 /* Create a new copy of an rtx.
5043 This function differs from copy_rtx in that it handles SCRATCHes and
5044 ASM_OPERANDs properly.
5045 INSN doesn't really have to be a full INSN; it could be just the
5046 pattern. */
5048 copy_insn (insn)
5049 rtx insn;
5051 copy_insn_n_scratches = 0;
5052 orig_asm_operands_vector = 0;
5053 orig_asm_constraints_vector = 0;
5054 copy_asm_operands_vector = 0;
5055 copy_asm_constraints_vector = 0;
5056 return copy_insn_1 (insn);
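/* A sketch of the two-step protocol described above copy_insn_1:
   copy the pattern first, then reuse the recorded SCRATCH mapping
   for the notes.

       copy = copy_insn (PATTERN (insn));        resets the tables
       notes = copy_insn_1 (REG_NOTES (insn));   shares the SCRATCH
                                                 copies made above

   emit_copy_of_insn_after at the end of this file copies REG_NOTES
   through copy_insn_1 in just this way.  */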
5059 /* Initialize data structures and variables in this file
5060 before generating rtl for each function. */
5062 void
5063 init_emit ()
5065 struct function *f = cfun;
5066 int i;
5068 f->emit = (struct emit_status *) ggc_alloc (sizeof (struct emit_status));
5069 first_insn = NULL;
5070 last_insn = NULL;
5071 seq_rtl_expr = NULL;
5072 cur_insn_uid = 1;
5073 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
5074 last_linenum = 0;
5075 last_filename = 0;
5076 first_label_num = label_num;
5077 last_label_num = 0;
5078 seq_stack = NULL;
5080 /* Init the tables that describe all the pseudo regs. */
5082 f->emit->regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
5084 f->emit->regno_pointer_align
5085 = (unsigned char *) ggc_alloc_cleared (f->emit->regno_pointer_align_length
5086 * sizeof (unsigned char));
5088 regno_reg_rtx
5089 = (rtx *) ggc_alloc_cleared (f->emit->regno_pointer_align_length
5090 * sizeof (rtx));
5092 f->emit->regno_decl
5093 = (tree *) ggc_alloc_cleared (f->emit->regno_pointer_align_length
5094 * sizeof (tree));
5096 /* Put copies of all the hard registers into regno_reg_rtx. */
5097 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5098 regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
5100 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
5101 init_virtual_regs (f->emit);
5104 /* Indicate that the virtual registers and stack locations are
5105 all pointers. */
5106 REG_POINTER (stack_pointer_rtx) = 1;
5107 REG_POINTER (frame_pointer_rtx) = 1;
5108 REG_POINTER (hard_frame_pointer_rtx) = 1;
5109 REG_POINTER (arg_pointer_rtx) = 1;
5111 REG_POINTER (virtual_incoming_args_rtx) = 1;
5112 REG_POINTER (virtual_stack_vars_rtx) = 1;
5113 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5114 REG_POINTER (virtual_outgoing_args_rtx) = 1;
5115 REG_POINTER (virtual_cfa_rtx) = 1;
5117 #ifdef STACK_BOUNDARY
5118 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5119 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5120 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5121 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5123 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5124 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5125 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5126 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5127 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
5128 #endif
5130 #ifdef INIT_EXPANDERS
5131 INIT_EXPANDERS;
5132 #endif
5135 /* Generate the constant vector 0 of mode MODE. */
5137 static rtx
5138 gen_const_vector_0 (mode)
5139 enum machine_mode mode;
5141 rtx tem;
5142 rtvec v;
5143 int units, i;
5144 enum machine_mode inner;
5146 units = GET_MODE_NUNITS (mode);
5147 inner = GET_MODE_INNER (mode);
5149 v = rtvec_alloc (units);
5151 /* CONST0_RTX for the inner mode must already be set when this function is called. */
5152 if (!CONST0_RTX (inner))
5153 abort ();
5155 for (i = 0; i < units; ++i)
5156 RTVEC_ELT (v, i) = CONST0_RTX (inner);
5158 tem = gen_rtx_CONST_VECTOR (mode, v);
5159 return tem;
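/* For V4SImode, for instance, the result is
   (const_vector:V4SI [(const_int 0) (const_int 0)
                       (const_int 0) (const_int 0)]),
   every element being the shared const0_rtx.  */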
5162 /* Create some permanent unique rtl objects shared between all functions.
5163 LINE_NUMBERS is nonzero if line numbers are to be generated. */
5165 void
5166 init_emit_once (line_numbers)
5167 int line_numbers;
5169 int i;
5170 enum machine_mode mode;
5171 enum machine_mode double_mode;
5173 /* Initialize the CONST_INT, CONST_DOUBLE, and memory attribute hash
5174 tables. */
5175 const_int_htab = htab_create (37, const_int_htab_hash,
5176 const_int_htab_eq, NULL);
5178 const_double_htab = htab_create (37, const_double_htab_hash,
5179 const_double_htab_eq, NULL);
5181 mem_attrs_htab = htab_create (37, mem_attrs_htab_hash,
5182 mem_attrs_htab_eq, NULL);
5184 no_line_numbers = ! line_numbers;
5186 /* Compute the byte, word and double modes. */
5188 byte_mode = VOIDmode;
5189 word_mode = VOIDmode;
5190 double_mode = VOIDmode;
5192 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
5193 mode = GET_MODE_WIDER_MODE (mode))
5195 if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
5196 && byte_mode == VOIDmode)
5197 byte_mode = mode;
5199 if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
5200 && word_mode == VOIDmode)
5201 word_mode = mode;
5204 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
5205 mode = GET_MODE_WIDER_MODE (mode))
5207 if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
5208 && double_mode == VOIDmode)
5209 double_mode = mode;
5212 ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
5214 /* Assign register numbers to the globally defined register rtx.
5215 This must be done at runtime because the register number field
5216 is in a union and some compilers can't initialize unions. */
5218 pc_rtx = gen_rtx (PC, VOIDmode);
5219 cc0_rtx = gen_rtx (CC0, VOIDmode);
5220 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
5221 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
5222 if (hard_frame_pointer_rtx == 0)
5223 hard_frame_pointer_rtx = gen_raw_REG (Pmode,
5224 HARD_FRAME_POINTER_REGNUM);
5225 if (arg_pointer_rtx == 0)
5226 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
5227 virtual_incoming_args_rtx =
5228 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
5229 virtual_stack_vars_rtx =
5230 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
5231 virtual_stack_dynamic_rtx =
5232 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
5233 virtual_outgoing_args_rtx =
5234 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
5235 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
5237 #ifdef INIT_EXPANDERS
5238 /* This is to initialize {init|mark|free}_machine_status before the first
5239 call to push_function_context_to. This is needed by the Chill front
5240 end which calls push_function_context_to before the first call to
5241 init_function_start. */
5242 INIT_EXPANDERS;
5243 #endif
5245 /* Create the unique rtx's for certain rtx codes and operand values. */
5247 /* Don't use gen_rtx here since gen_rtx in this case
5248 tries to use these variables. */
5249 for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
5250 const_int_rtx[i + MAX_SAVED_CONST_INT] =
5251 gen_rtx_raw_CONST_INT (VOIDmode, i);
5253 if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
5254 && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
5255 const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
5256 else
5257 const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
5259 REAL_VALUE_FROM_INT (dconst0, 0, 0, double_mode);
5260 REAL_VALUE_FROM_INT (dconst1, 1, 0, double_mode);
5261 REAL_VALUE_FROM_INT (dconst2, 2, 0, double_mode);
5262 REAL_VALUE_FROM_INT (dconstm1, -1, -1, double_mode);
5264 for (i = 0; i <= 2; i++)
5266 REAL_VALUE_TYPE *r =
5267 (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
5269 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
5270 mode = GET_MODE_WIDER_MODE (mode))
5271 const_tiny_rtx[i][(int) mode] =
5272 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5274 const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
5276 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
5277 mode = GET_MODE_WIDER_MODE (mode))
5278 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5280 for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT);
5281 mode != VOIDmode;
5282 mode = GET_MODE_WIDER_MODE (mode))
5283 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
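/* Once the loop above completes, const_tiny_rtx[1][(int) SImode] is
   const1_rtx and const_tiny_rtx[1][(int) DFmode] is the CONST_DOUBLE
   for 1.0 in DFmode, for example.  The vector modes below get their
   own shared zero constants.  */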
5286 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
5287 mode != VOIDmode;
5288 mode = GET_MODE_WIDER_MODE (mode))
5289 const_tiny_rtx[0][(int) mode] = gen_const_vector_0 (mode);
5291 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
5292 mode != VOIDmode;
5293 mode = GET_MODE_WIDER_MODE (mode))
5294 const_tiny_rtx[0][(int) mode] = gen_const_vector_0 (mode);
5296 for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
5297 if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
5298 const_tiny_rtx[0][i] = const0_rtx;
5300 const_tiny_rtx[0][(int) BImode] = const0_rtx;
5301 if (STORE_FLAG_VALUE == 1)
5302 const_tiny_rtx[1][(int) BImode] = const1_rtx;
5304 #ifdef RETURN_ADDRESS_POINTER_REGNUM
5305 return_address_pointer_rtx
5306 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
5307 #endif
5309 #ifdef STRUCT_VALUE
5310 struct_value_rtx = STRUCT_VALUE;
5311 #else
5312 struct_value_rtx = gen_rtx_REG (Pmode, STRUCT_VALUE_REGNUM);
5313 #endif
5315 #ifdef STRUCT_VALUE_INCOMING
5316 struct_value_incoming_rtx = STRUCT_VALUE_INCOMING;
5317 #else
5318 #ifdef STRUCT_VALUE_INCOMING_REGNUM
5319 struct_value_incoming_rtx
5320 = gen_rtx_REG (Pmode, STRUCT_VALUE_INCOMING_REGNUM);
5321 #else
5322 struct_value_incoming_rtx = struct_value_rtx;
5323 #endif
5324 #endif
5326 #ifdef STATIC_CHAIN_REGNUM
5327 static_chain_rtx = gen_rtx_REG (Pmode, STATIC_CHAIN_REGNUM);
5329 #ifdef STATIC_CHAIN_INCOMING_REGNUM
5330 if (STATIC_CHAIN_INCOMING_REGNUM != STATIC_CHAIN_REGNUM)
5331 static_chain_incoming_rtx
5332 = gen_rtx_REG (Pmode, STATIC_CHAIN_INCOMING_REGNUM);
5333 else
5334 #endif
5335 static_chain_incoming_rtx = static_chain_rtx;
5336 #endif
5338 #ifdef STATIC_CHAIN
5339 static_chain_rtx = STATIC_CHAIN;
5341 #ifdef STATIC_CHAIN_INCOMING
5342 static_chain_incoming_rtx = STATIC_CHAIN_INCOMING;
5343 #else
5344 static_chain_incoming_rtx = static_chain_rtx;
5345 #endif
5346 #endif
5348 if (PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
5349 pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
5352 /* Query and clear/restore no_line_numbers. This is used by the
5353 switch/case handling in stmt.c to give proper line numbers in
5354 warnings about unreachable code. */
5357 force_line_numbers ()
5359 int old = no_line_numbers;
5361 no_line_numbers = 0;
5362 if (old)
5363 force_next_line_note ();
5364 return old;
5367 void
5368 restore_line_number_status (old_value)
5369 int old_value;
5371 no_line_numbers = old_value;
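/* The two functions above are used as a save/restore pair, e.g.:

       int old = force_line_numbers ();
       ... expand statements that need line notes ...
       restore_line_number_status (old);  */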
5374 /* Produce an exact duplicate of insn INSN after AFTER.
5375 Take care to update libcall regions if present. */
5378 emit_copy_of_insn_after (insn, after)
5379 rtx insn, after;
5381 rtx new;
5382 rtx note1, note2, link;
5384 switch (GET_CODE (insn))
5386 case INSN:
5387 new = emit_insn_after (copy_insn (PATTERN (insn)), after);
5388 break;
5390 case JUMP_INSN:
5391 new = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
5392 break;
5394 case CALL_INSN:
5395 new = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
5396 if (CALL_INSN_FUNCTION_USAGE (insn))
5397 CALL_INSN_FUNCTION_USAGE (new)
5398 = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
5399 SIBLING_CALL_P (new) = SIBLING_CALL_P (insn);
5400 CONST_OR_PURE_CALL_P (new) = CONST_OR_PURE_CALL_P (insn);
5401 break;
5403 default:
5404 abort ();
5407 /* Update LABEL_NUSES. */
5408 mark_jump_label (PATTERN (new), new, 0);
5410 INSN_SCOPE (new) = INSN_SCOPE (insn);
5412 /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
5413 make them. */
5414 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
5415 if (REG_NOTE_KIND (link) != REG_LABEL)
5417 if (GET_CODE (link) == EXPR_LIST)
5418 REG_NOTES (new)
5419 = copy_insn_1 (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link),
5420 XEXP (link, 0),
5421 REG_NOTES (new)));
5422 else
5423 REG_NOTES (new)
5424 = copy_insn_1 (gen_rtx_INSN_LIST (REG_NOTE_KIND (link),
5425 XEXP (link, 0),
5426 REG_NOTES (new)));
5429 /* Fix the libcall sequences. */
5430 if ((note1 = find_reg_note (new, REG_RETVAL, NULL_RTX)) != NULL)
5432 rtx p = new;
5433 while ((note2 = find_reg_note (p, REG_LIBCALL, NULL_RTX)) == NULL)
5434 p = PREV_INSN (p);
5435 XEXP (note1, 0) = p;
5436 XEXP (note2, 0) = new;
5438 return new;
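/* The libcall fixup above keeps a copied region self-contained, on
   the assumption that the whole region was copied: the REG_RETVAL
   note of the copied tail is redirected to the copied head, and the
   copied head's REG_LIBCALL note to the copied tail.  Roughly:

       original:  I1 (REG_LIBCALL -> I3) ... I3 (REG_RETVAL -> I1)
       copies:    C1 (REG_LIBCALL -> C3) ... C3 (REG_RETVAL -> C1)  */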
5441 #include "gt-emit-rtl.h"