/* Emit RTL for the GNU C-Compiler expander.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/* Middle-to-low level generation of rtx code and insns.

   This file contains the functions `gen_rtx', `gen_reg_rtx'
   and `gen_label_rtx' that are the usual ways of creating rtl
   expressions for most purposes.

   It also has the functions for creating insns and linking
   them in the doubly-linked chain.

   The patterns of the insns are created by machine-dependent
   routines in insn-emit.c, which is generated automatically from
   the machine description.  These routines use `gen_rtx' to make
   the individual rtx's of the pattern; what is machine dependent
   is the kind of rtx's they make and what arguments they use.  */
#include "config.h"
#include "system.h"
#include "toplev.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "hashtab.h"
#include "insn-config.h"
#include "recog.h"
#include "real.h"
#include "obstack.h"
#include "bitmap.h"
#include "basic-block.h"
#include "ggc.h"
#include "debug.h"
#include "langhooks.h"
/* Commonly used modes.  */

enum machine_mode byte_mode;	/* Mode whose width is BITS_PER_UNIT.  */
enum machine_mode word_mode;	/* Mode whose width is BITS_PER_WORD.  */
enum machine_mode double_mode;	/* Mode whose width is DOUBLE_TYPE_SIZE.  */
enum machine_mode ptr_mode;	/* Mode whose width is POINTER_SIZE.  */

/* This is *not* reset after each function.  It gives each CODE_LABEL
   in the entire compilation a unique label number.  */

static int label_num = 1;

/* Highest label number in current function.
   Zero means use the value of label_num instead.
   This is nonzero only when belatedly compiling an inline function.  */

static int last_label_num;

/* Value label_num had when set_new_first_and_last_label_number was called.
   If label_num has not changed since then, last_label_num is valid.  */

static int base_label_num;

/* Nonzero means do not generate NOTEs for source line numbers.  */

static int no_line_numbers;
/* Commonly used rtx's, so that we only need space for one copy.
   These are initialized once for the entire compilation.
   All of these are unique; no other rtx-object will be equal to any
   of these.  */

rtx global_rtl[GR_MAX];

/* We record floating-point CONST_DOUBLEs in each floating-point mode for
   the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
   record a copy of const[012]_rtx.  */

rtx const_tiny_rtx[3][(int) MAX_MACHINE_MODE];

rtx const_true_rtx;

REAL_VALUE_TYPE dconst0;
REAL_VALUE_TYPE dconst1;
REAL_VALUE_TYPE dconst2;
REAL_VALUE_TYPE dconstm1;

/* All references to the following fixed hard registers go through
   these unique rtl objects.  On machines where the frame-pointer and
   arg-pointer are the same register, they use the same unique object.

   After register allocation, other rtl objects which used to be pseudo-regs
   may be clobbered to refer to the frame-pointer register.
   But references that were originally to the frame-pointer can be
   distinguished from the others because they contain frame_pointer_rtx.

   When to use frame_pointer_rtx and hard_frame_pointer_rtx is a little
   tricky: until register elimination has taken place hard_frame_pointer_rtx
   should be used if it is being set, and frame_pointer_rtx otherwise.  After
   register elimination hard_frame_pointer_rtx should always be used.
   On machines where the two registers are the same (most), these are
   the same rtx.

   In an inline procedure, the stack and frame pointer rtxs may not be
   used for anything else.  */
rtx struct_value_rtx;		/* (REG:Pmode STRUCT_VALUE_REGNUM) */
rtx struct_value_incoming_rtx;	/* (REG:Pmode STRUCT_VALUE_INCOMING_REGNUM) */
rtx static_chain_rtx;		/* (REG:Pmode STATIC_CHAIN_REGNUM) */
rtx static_chain_incoming_rtx;	/* (REG:Pmode STATIC_CHAIN_INCOMING_REGNUM) */
rtx pic_offset_table_rtx;	/* (REG:Pmode PIC_OFFSET_TABLE_REGNUM) */

/* This is used to implement __builtin_return_address for some machines.
   See for instance the MIPS port.  */
rtx return_address_pointer_rtx;	/* (REG:Pmode RETURN_ADDRESS_POINTER_REGNUM) */

/* We make one copy of (const_int C) where C is in
   [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
   to save space during the compilation and simplify comparisons of
   integers.  */

rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];

/* A hash table storing CONST_INTs whose absolute value is greater
   than MAX_SAVED_CONST_INT.  */

static htab_t const_int_htab;

/* A hash table storing memory attribute structures.  */
static htab_t mem_attrs_htab;

/* A hash table storing all CONST_DOUBLEs.  */
static htab_t const_double_htab;
/* start_sequence and gen_sequence can make a lot of rtx expressions which are
   shortly thrown away.  We use two mechanisms to prevent this waste:

   For sizes up to 5 elements, we keep a SEQUENCE and its associated
   rtvec for use by gen_sequence.  One entry for each size is
   sufficient because most cases are calls to gen_sequence followed by
   immediately emitting the SEQUENCE.  Reuse is safe since emitting a
   sequence is destructive on the insn in it anyway and hence can't be
   redone.

   We do not bother to save this cached data over nested function calls.
   Instead, we just reinitialize them.  */

#define SEQUENCE_RESULT_SIZE 5

static rtx sequence_result[SEQUENCE_RESULT_SIZE];

/* During RTL generation, we also keep a list of free INSN rtl codes.  */
static rtx free_insn;

#define first_insn (cfun->emit->x_first_insn)
#define last_insn (cfun->emit->x_last_insn)
#define cur_insn_uid (cfun->emit->x_cur_insn_uid)
#define last_linenum (cfun->emit->x_last_linenum)
#define last_filename (cfun->emit->x_last_filename)
#define first_label_num (cfun->emit->x_first_label_num)
static rtx make_jump_insn_raw		PARAMS ((rtx));
static rtx make_call_insn_raw		PARAMS ((rtx));
static rtx find_line_note		PARAMS ((rtx));
static void mark_sequence_stack		PARAMS ((struct sequence_stack *));
static rtx change_address_1		PARAMS ((rtx, enum machine_mode, rtx,
						 int));
static void unshare_all_rtl_1		PARAMS ((rtx));
static void unshare_all_decls		PARAMS ((tree));
static void reset_used_decls		PARAMS ((tree));
static void mark_label_nuses		PARAMS ((rtx));
static hashval_t const_int_htab_hash	PARAMS ((const void *));
static int const_int_htab_eq		PARAMS ((const void *,
						 const void *));
static hashval_t const_double_htab_hash PARAMS ((const void *));
static int const_double_htab_eq		PARAMS ((const void *,
						 const void *));
static rtx lookup_const_double		PARAMS ((rtx));
static hashval_t mem_attrs_htab_hash	PARAMS ((const void *));
static int mem_attrs_htab_eq		PARAMS ((const void *,
						 const void *));
static void mem_attrs_mark		PARAMS ((const void *));
static mem_attrs *get_mem_attrs		PARAMS ((HOST_WIDE_INT, tree, rtx,
						 rtx, unsigned int,
						 enum machine_mode));
static tree component_ref_for_mem_expr	PARAMS ((tree));
static rtx gen_const_vector_0		PARAMS ((enum machine_mode));
/* Probability of the conditional branch currently being processed by
   try_split.  Set to -1 otherwise.  */
int split_branch_probability = -1;
/* Returns a hash code for X (which is really a CONST_INT).  */

static hashval_t
const_int_htab_hash (x)
     const void *x;
{
  return (hashval_t) INTVAL ((struct rtx_def *) x);
}

/* Returns non-zero if the value represented by X (which is really a
   CONST_INT) is the same as that given by Y (which is really a
   HOST_WIDE_INT *).  */

static int
const_int_htab_eq (x, y)
     const void *x;
     const void *y;
{
  return (INTVAL ((rtx) x) == *((const HOST_WIDE_INT *) y));
}
/* Returns a hash code for X (which is really a CONST_DOUBLE).  */
static hashval_t
const_double_htab_hash (x)
     const void *x;
{
  hashval_t h = 0;
  size_t i;
  rtx value = (rtx) x;

  for (i = 0; i < sizeof (CONST_DOUBLE_FORMAT) - 1; i++)
    h ^= XWINT (value, i);
  return h;
}

/* Returns non-zero if the value represented by X (really a CONST_DOUBLE)
   is the same as that represented by Y (really a CONST_DOUBLE).  */
static int
const_double_htab_eq (x, y)
     const void *x;
     const void *y;
{
  rtx a = (rtx) x, b = (rtx) y;
  size_t i;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  for (i = 0; i < sizeof (CONST_DOUBLE_FORMAT) - 1; i++)
    if (XWINT (a, i) != XWINT (b, i))
      return 0;

  return 1;
}
/* Returns a hash code for X (which is really a mem_attrs *).  */

static hashval_t
mem_attrs_htab_hash (x)
     const void *x;
{
  mem_attrs *p = (mem_attrs *) x;

  return (p->alias ^ (p->align * 1000)
	  ^ ((p->offset ? INTVAL (p->offset) : 0) * 50000)
	  ^ ((p->size ? INTVAL (p->size) : 0) * 2500000)
	  ^ (size_t) p->expr);
}

/* Returns non-zero if the value represented by X (which is really a
   mem_attrs *) is the same as that given by Y (which is also really a
   mem_attrs *).  */

static int
mem_attrs_htab_eq (x, y)
     const void *x;
     const void *y;
{
  mem_attrs *p = (mem_attrs *) x;
  mem_attrs *q = (mem_attrs *) y;

  return (p->alias == q->alias && p->expr == q->expr && p->offset == q->offset
	  && p->size == q->size && p->align == q->align);
}

/* This routine is called when we determine that we need a mem_attrs entry.
   It marks the associated decl and RTL as being used, if present.  */

static void
mem_attrs_mark (x)
     const void *x;
{
  mem_attrs *p = (mem_attrs *) x;

  if (p->expr)
    ggc_mark_tree (p->expr);

  if (p->offset)
    ggc_mark_rtx (p->offset);

  if (p->size)
    ggc_mark_rtx (p->size);
}
/* Allocate a new mem_attrs structure and insert it into the hash table if
   one identical to it is not already in the table.  We are doing this for
   MEM of mode MODE.  */

static mem_attrs *
get_mem_attrs (alias, expr, offset, size, align, mode)
     HOST_WIDE_INT alias;
     tree expr;
     rtx offset;
     rtx size;
     unsigned int align;
     enum machine_mode mode;
{
  mem_attrs attrs;
  void **slot;

  /* If everything is the default, we can just return zero.  */
  if (alias == 0 && expr == 0 && offset == 0
      && (size == 0
	  || (mode != BLKmode && GET_MODE_SIZE (mode) == INTVAL (size)))
      && (align == BITS_PER_UNIT
	  || (STRICT_ALIGNMENT
	      && mode != BLKmode && align == GET_MODE_ALIGNMENT (mode))))
    return 0;

  attrs.alias = alias;
  attrs.expr = expr;
  attrs.offset = offset;
  attrs.size = size;
  attrs.align = align;

  slot = htab_find_slot (mem_attrs_htab, &attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc (sizeof (mem_attrs));
      memcpy (*slot, &attrs, sizeof (mem_attrs));
    }

  return *slot;
}
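/* Illustrative sketch of the default case (a hypothetical caller): a MEM
   whose attributes are all defaults keeps a null MEM_ATTRS, so nothing is
   allocated in the common case.  For instance, assuming an SImode MEM with
   no known decl, offset, or alias set, and size implied by SImode,

	get_mem_attrs (0, NULL_TREE, NULL_RTX,
		       GEN_INT (GET_MODE_SIZE (SImode)),
		       BITS_PER_UNIT, SImode)

   returns 0 rather than an interned mem_attrs entry.  */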
/* Generate a new REG rtx.  Make sure ORIGINAL_REGNO is set properly, and
   don't attempt to share with the various global pieces of rtl (such as
   frame_pointer_rtx).  */

rtx
gen_raw_REG (mode, regno)
     enum machine_mode mode;
     int regno;
{
  rtx x = gen_rtx_raw_REG (mode, regno);
  ORIGINAL_REGNO (x) = regno;
  return x;
}
/* There are some RTL codes that require special attention; the generation
   functions do the raw handling.  If you add to this list, modify
   special_rtx in gengenrtl.c as well.  */

rtx
gen_rtx_CONST_INT (mode, arg)
     enum machine_mode mode ATTRIBUTE_UNUSED;
     HOST_WIDE_INT arg;
{
  void **slot;

  if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
    return const_int_rtx[arg + MAX_SAVED_CONST_INT];

#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
  if (const_true_rtx && arg == STORE_FLAG_VALUE)
    return const_true_rtx;
#endif

  /* Look up the CONST_INT in the hash table.  */
  slot = htab_find_slot_with_hash (const_int_htab, &arg,
				   (hashval_t) arg, INSERT);
  if (*slot == 0)
    *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);

  return (rtx) *slot;
}
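/* Because every CONST_INT is interned (either in const_int_rtx[] or in
   const_int_htab), two CONST_INTs with the same value are the same rtx
   object, so pointer comparison suffices.  Illustrative consequences:

	GEN_INT (0) == const0_rtx
	GEN_INT (12345) == GEN_INT (12345)
*/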
rtx
gen_int_mode (c, mode)
     HOST_WIDE_INT c;
     enum machine_mode mode;
{
  return GEN_INT (trunc_int_for_mode (c, mode));
}
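/* Sketch of the difference from plain GEN_INT: the value is first
   truncated and sign-extended for MODE, so out-of-range input bits do not
   produce a constant that is invalid for the mode.  For example:

	gen_int_mode (255, QImode) == constm1_rtx

   because 255 truncated to the 8-bit QImode is -1.  */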
/* CONST_DOUBLEs might be created from pairs of integers, or from
   REAL_VALUE_TYPEs.  Also, their length is known only at run time,
   so we cannot use gen_rtx_raw_CONST_DOUBLE.  */

/* Determine whether REAL, a CONST_DOUBLE, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */
static rtx
lookup_const_double (real)
     rtx real;
{
  void **slot = htab_find_slot (const_double_htab, real, INSERT);
  if (*slot == 0)
    *slot = real;

  return (rtx) *slot;
}

/* Return a CONST_DOUBLE rtx for a floating-point value specified by
   VALUE in mode MODE.  */
rtx
const_double_from_real_value (value, mode)
     REAL_VALUE_TYPE value;
     enum machine_mode mode;
{
  rtx real = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (real, mode);

  memcpy (&CONST_DOUBLE_LOW (real), &value, sizeof (REAL_VALUE_TYPE));

  return lookup_const_double (real);
}
/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
   of ints: I0 is the low-order word and I1 is the high-order word.
   Do not use this routine for non-integer modes; convert to
   REAL_VALUE_TYPE and use CONST_DOUBLE_FROM_REAL_VALUE.  */

rtx
immed_double_const (i0, i1, mode)
     HOST_WIDE_INT i0, i1;
     enum machine_mode mode;
{
  rtx value;
  unsigned int i;

  if (mode != VOIDmode)
    {
      int width;
      if (GET_MODE_CLASS (mode) != MODE_INT
	  && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
	abort ();

      /* We clear out all bits that don't belong in MODE, unless they and
	 our sign bit are all one.  So we get either a reasonable negative
	 value or a reasonable unsigned value for this mode.  */
      width = GET_MODE_BITSIZE (mode);
      if (width < HOST_BITS_PER_WIDE_INT
	  && ((i0 & ((HOST_WIDE_INT) (-1) << (width - 1)))
	      != ((HOST_WIDE_INT) (-1) << (width - 1))))
	i0 &= ((HOST_WIDE_INT) 1 << width) - 1, i1 = 0;
      else if (width == HOST_BITS_PER_WIDE_INT
	       && ! (i1 == ~0 && i0 < 0))
	i1 = 0;
      else if (width > 2 * HOST_BITS_PER_WIDE_INT)
	/* We cannot represent this value as a constant.  */
	abort ();

      /* If this would be an entire word for the target, but is not for
	 the host, then sign-extend on the host so that the number will
	 look the same way on the host that it would on the target.

	 For example, when building a 64 bit alpha hosted 32 bit sparc
	 targeted compiler, then we want the 32 bit unsigned value -1 to be
	 represented as a 64 bit value -1, and not as 0x00000000ffffffff.
	 The latter confuses the sparc backend.  */

      if (width < HOST_BITS_PER_WIDE_INT
	  && (i0 & ((HOST_WIDE_INT) 1 << (width - 1))))
	i0 |= ((HOST_WIDE_INT) (-1) << width);

      /* If MODE fits within HOST_BITS_PER_WIDE_INT, always use a
	 CONST_INT.

	 ??? Strictly speaking, this is wrong if we create a CONST_INT for
	 a large unsigned constant with the size of MODE being
	 HOST_BITS_PER_WIDE_INT and later try to interpret that constant
	 in a wider mode.  In that case we will mis-interpret it as a
	 negative number.

	 Unfortunately, the only alternative is to make a CONST_DOUBLE for
	 any constant in any mode if it is an unsigned constant larger
	 than the maximum signed integer in an int on the host.  However,
	 doing this will break everyone that always expects to see a
	 CONST_INT for SImode and smaller.

	 We have always been making CONST_INTs in this case, so nothing
	 new is being broken.  */

      if (width <= HOST_BITS_PER_WIDE_INT)
	i1 = (i0 < 0) ? ~(HOST_WIDE_INT) 0 : 0;
    }

  /* If this integer fits in one word, return a CONST_INT.  */
  if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
    return GEN_INT (i0);

  /* We use VOIDmode for integers.  */
  value = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (value, VOIDmode);

  CONST_DOUBLE_LOW (value) = i0;
  CONST_DOUBLE_HIGH (value) = i1;

  for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
    XWINT (value, i) = 0;

  return lookup_const_double (value);
}
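/* A brief sketch of the two result shapes, assuming a host with a 32-bit
   HOST_WIDE_INT and a 64-bit DImode target value:

	immed_double_const (5, 0, DImode)   =>  (const_int 5)
	immed_double_const (0, 1, DImode)   =>  a CONST_DOUBLE holding
						low word 0, high word 1,
						i.e. the value 1 << 32

   The first case fits in one host word, so the shared CONST_INT is used;
   the second needs both words, so an interned CONST_DOUBLE is built.  */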
rtx
gen_rtx_REG (mode, regno)
     enum machine_mode mode;
     unsigned int regno;
{
  /* In case the MD file explicitly references the frame pointer, have
     all such references point to the same frame pointer.  This is
     used during frame pointer elimination to distinguish the explicit
     references to these registers from pseudos that happened to be
     assigned to them.

     If we have eliminated the frame pointer or arg pointer, we will
     be using it as a normal register, for example as a spill
     register.  In such cases, we might be accessing it in a mode that
     is not Pmode and therefore cannot use the pre-allocated rtx.

     Also don't do this when we are making new REGs in reload, since
     we don't want to get confused with the real pointers.  */

  if (mode == Pmode && !reload_in_progress)
    {
      if (regno == FRAME_POINTER_REGNUM)
	return frame_pointer_rtx;
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
      if (regno == HARD_FRAME_POINTER_REGNUM)
	return hard_frame_pointer_rtx;
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && HARD_FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
      if (regno == ARG_POINTER_REGNUM)
	return arg_pointer_rtx;
#endif
#ifdef RETURN_ADDRESS_POINTER_REGNUM
      if (regno == RETURN_ADDRESS_POINTER_REGNUM)
	return return_address_pointer_rtx;
#endif
      if (regno == PIC_OFFSET_TABLE_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	return pic_offset_table_rtx;
      if (regno == STACK_POINTER_REGNUM)
	return stack_pointer_rtx;
    }

  return gen_raw_REG (mode, regno);
}
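/* Consequence worth noting (illustrative): asking for one of the special
   pointer registers in Pmode yields the shared global rtx, so outside of
   reload

	gen_rtx_REG (Pmode, STACK_POINTER_REGNUM) == stack_pointer_rtx

   holds, and passes such as frame-pointer elimination rely on exactly
   this pointer identity to recognize explicit references.  */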
rtx
gen_rtx_MEM (mode, addr)
     enum machine_mode mode;
     rtx addr;
{
  rtx rt = gen_rtx_raw_MEM (mode, addr);

  /* This field is not cleared by the mere allocation of the rtx, so
     we clear it here.  */
  MEM_ATTRS (rt) = 0;

  return rt;
}
rtx
gen_rtx_SUBREG (mode, reg, offset)
     enum machine_mode mode;
     rtx reg;
     int offset;
{
  /* This is the most common failure type.
     Catch it early so we can see who does it.  */
  if ((offset % GET_MODE_SIZE (mode)) != 0)
    abort ();

  /* This check isn't usable right now because combine will
     throw arbitrary crap like a CALL into a SUBREG in
     gen_lowpart_for_combine so we must just eat it.  */
#if 0
  /* Check for this too.  */
  if (offset >= GET_MODE_SIZE (GET_MODE (reg)))
    abort ();
#endif
  return gen_rtx_raw_SUBREG (mode, reg, offset);
}
/* Generate a SUBREG representing the least-significant part of REG if MODE
   is smaller than the mode of REG; otherwise generate a paradoxical
   SUBREG.  */

rtx
gen_lowpart_SUBREG (mode, reg)
     enum machine_mode mode;
     rtx reg;
{
  enum machine_mode inmode;

  inmode = GET_MODE (reg);
  if (inmode == VOIDmode)
    inmode = mode;
  return gen_rtx_SUBREG (mode, reg,
			 subreg_lowpart_offset (mode, inmode));
}
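/* Illustrative result, assuming REG is a DImode pseudo:

	gen_lowpart_SUBREG (SImode, reg)
	  => (subreg:SI (reg:DI N) 0)	on a little-endian target
	  => (subreg:SI (reg:DI N) 4)	on a 32-bit big-endian target

   The byte offset comes from subreg_lowpart_offset, defined below.  */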
/* rtx gen_rtx (code, mode, [element1, ..., elementn])
**
**	    This routine generates an RTX of the size specified by
**	<code>, which is an RTX code.  The RTX structure is initialized
**	from the arguments <element1> through <elementn>, which are
**	interpreted according to the specific RTX type's format.  The
**	special machine mode associated with the rtx (if any) is specified
**	in <mode>.
**
**	    gen_rtx can be invoked in a way which resembles the lisp-like
**	rtx it will generate.  For example, the following rtx structure:
**
**	      (plus:QI (mem:QI (reg:SI 1))
**		       (mem:QI (plus:SI (reg:SI 2) (reg:SI 3))))
**
**		...would be generated by the following C code:
**
**	gen_rtx (PLUS, QImode,
**	    gen_rtx (MEM, QImode,
**		gen_rtx (REG, SImode, 1)),
**	    gen_rtx (MEM, QImode,
**		gen_rtx (PLUS, SImode,
**		    gen_rtx (REG, SImode, 2),
**		    gen_rtx (REG, SImode, 3))))
*/
/*VARARGS2*/
rtx
gen_rtx VPARAMS ((enum rtx_code code, enum machine_mode mode, ...))
{
  int i;		/* Array indices...			*/
  const char *fmt;	/* Current rtx's format...		*/
  rtx rt_val;		/* RTX to return to caller...		*/

  VA_OPEN (p, mode);
  VA_FIXEDARG (p, enum rtx_code, code);
  VA_FIXEDARG (p, enum machine_mode, mode);

  switch (code)
    {
    case CONST_INT:
      rt_val = gen_rtx_CONST_INT (mode, va_arg (p, HOST_WIDE_INT));
      break;

    case CONST_DOUBLE:
      {
	HOST_WIDE_INT arg0 = va_arg (p, HOST_WIDE_INT);
	HOST_WIDE_INT arg1 = va_arg (p, HOST_WIDE_INT);

	rt_val = immed_double_const (arg0, arg1, mode);
      }
      break;

    case REG:
      rt_val = gen_rtx_REG (mode, va_arg (p, int));
      break;

    case MEM:
      rt_val = gen_rtx_MEM (mode, va_arg (p, rtx));
      break;

    default:
      rt_val = rtx_alloc (code);	/* Allocate the storage space.  */
      rt_val->mode = mode;		/* Store the machine mode...  */

      fmt = GET_RTX_FORMAT (code);	/* Find the right format...  */
      for (i = 0; i < GET_RTX_LENGTH (code); i++)
	{
	  switch (*fmt++)
	    {
	    case '0':		/* Unused field.  */
	      break;

	    case 'i':		/* An integer?  */
	      XINT (rt_val, i) = va_arg (p, int);
	      break;

	    case 'w':		/* A wide integer?  */
	      XWINT (rt_val, i) = va_arg (p, HOST_WIDE_INT);
	      break;

	    case 's':		/* A string?  */
	      XSTR (rt_val, i) = va_arg (p, char *);
	      break;

	    case 'e':		/* An expression?  */
	    case 'u':		/* An insn?  Same except when printing.  */
	      XEXP (rt_val, i) = va_arg (p, rtx);
	      break;

	    case 'E':		/* An RTX vector?  */
	      XVEC (rt_val, i) = va_arg (p, rtvec);
	      break;

	    case 'b':		/* A bitmap?  */
	      XBITMAP (rt_val, i) = va_arg (p, bitmap);
	      break;

	    case 't':		/* A tree?  */
	      XTREE (rt_val, i) = va_arg (p, tree);
	      break;

	    default:
	      abort ();
	    }
	}
      break;
    }

  VA_CLOSE (p);
  return rt_val;
}
/* gen_rtvec (n, [rt1, ..., rtn])
**
**	    This routine creates an rtvec and stores within it the
**	pointers to rtx's which are its arguments.
*/

/*VARARGS1*/
rtvec
gen_rtvec VPARAMS ((int n, ...))
{
  int i, save_n;
  rtx *vector;

  VA_OPEN (p, n);
  VA_FIXEDARG (p, int, n);

  if (n == 0)
    return NULL_RTVEC;		/* Don't allocate an empty rtvec...  */

  vector = (rtx *) alloca (n * sizeof (rtx));

  for (i = 0; i < n; i++)
    vector[i] = va_arg (p, rtx);

  /* The definition of VA_* in K&R C causes `n' to go out of scope.  */
  save_n = n;
  VA_CLOSE (p);

  return gen_rtvec_v (save_n, vector);
}

rtvec
gen_rtvec_v (n, argp)
     int n;
     rtx *argp;
{
  int i;
  rtvec rt_val;

  if (n == 0)
    return NULL_RTVEC;		/* Don't allocate an empty rtvec...  */

  rt_val = rtvec_alloc (n);	/* Allocate an rtvec...  */

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}
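/* Typical use (a sketch): rtvecs mostly carry the operand vectors of
   codes like PARALLEL.  E.g., wrapping two previously built SET rtxs,
   set0 and set1, into one pattern:

	gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set0, set1))
*/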
/* Generate a REG rtx for a new pseudo register of mode MODE.
   This pseudo is assigned the next sequential register number.  */

rtx
gen_reg_rtx (mode)
     enum machine_mode mode;
{
  struct function *f = cfun;
  rtx val;

  /* Don't let anything called after initial flow analysis create new
     registers.  */
  if (no_new_pseudos)
    abort ();

  if (generating_concat_p
      && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
    {
      /* For complex modes, don't make a single pseudo.
	 Instead, make a CONCAT of two pseudos.
	 This allows noncontiguous allocation of the real and imaginary parts,
	 which makes much better code.  Besides, allocating DCmode
	 pseudos overstrains reload on some machines like the 386.  */
      rtx realpart, imagpart;
      int size = GET_MODE_UNIT_SIZE (mode);
      enum machine_mode partmode
	= mode_for_size (size * BITS_PER_UNIT,
			 (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
			  ? MODE_FLOAT : MODE_INT),
			 0);

      realpart = gen_reg_rtx (partmode);
      imagpart = gen_reg_rtx (partmode);
      return gen_rtx_CONCAT (mode, realpart, imagpart);
    }

  /* Make sure regno_pointer_align, regno_decl, and regno_reg_rtx are large
     enough to have an element for this pseudo reg number.  */

  if (reg_rtx_no == f->emit->regno_pointer_align_length)
    {
      int old_size = f->emit->regno_pointer_align_length;
      char *new;
      rtx *new1;
      tree *new2;

      new = xrealloc (f->emit->regno_pointer_align, old_size * 2);
      memset (new + old_size, 0, old_size);
      f->emit->regno_pointer_align = (unsigned char *) new;

      new1 = (rtx *) xrealloc (f->emit->x_regno_reg_rtx,
			       old_size * 2 * sizeof (rtx));
      memset (new1 + old_size, 0, old_size * sizeof (rtx));
      regno_reg_rtx = new1;

      new2 = (tree *) xrealloc (f->emit->regno_decl,
				old_size * 2 * sizeof (tree));
      memset (new2 + old_size, 0, old_size * sizeof (tree));
      f->emit->regno_decl = new2;

      f->emit->regno_pointer_align_length = old_size * 2;
    }

  val = gen_raw_REG (mode, reg_rtx_no);
  regno_reg_rtx[reg_rtx_no++] = val;
  return val;
}
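/* Illustration of the complex-mode case above: with generating_concat_p
   set, requesting a DCmode pseudo does not allocate one DCmode register
   but a pair of DFmode pseudos glued together:

	gen_reg_rtx (DCmode)
	  => (concat:DC (reg:DF i) (reg:DF i+1))

   so the real and imaginary parts can be allocated independently.  */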
/* Identify REG (which may be a CONCAT) as a user register.  */

void
mark_user_reg (reg)
     rtx reg;
{
  if (GET_CODE (reg) == CONCAT)
    {
      REG_USERVAR_P (XEXP (reg, 0)) = 1;
      REG_USERVAR_P (XEXP (reg, 1)) = 1;
    }
  else if (GET_CODE (reg) == REG)
    REG_USERVAR_P (reg) = 1;
  else
    abort ();
}

/* Identify REG as a probable pointer register and show its alignment
   as ALIGN, if nonzero.  */

void
mark_reg_pointer (reg, align)
     rtx reg;
     int align;
{
  if (! REG_POINTER (reg))
    {
      REG_POINTER (reg) = 1;

      if (align)
	REGNO_POINTER_ALIGN (REGNO (reg)) = align;
    }
  else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
    /* We can no longer be sure just how aligned this pointer is.  */
    REGNO_POINTER_ALIGN (REGNO (reg)) = align;
}
/* Return 1 plus largest pseudo reg number used in the current function.  */

int
max_reg_num ()
{
  return reg_rtx_no;
}

/* Return 1 + the largest label number used so far in the current function.  */

int
max_label_num ()
{
  if (last_label_num && label_num == base_label_num)
    return last_label_num;
  return label_num;
}

/* Return first label number used in this function (if any were used).  */

int
get_first_label_num ()
{
  return first_label_num;
}
/* Return the final regno of X, which is a SUBREG of a hard
   register.  */
unsigned int
subreg_hard_regno (x, check_mode)
     rtx x;
     int check_mode;
{
  enum machine_mode mode = GET_MODE (x);
  unsigned int byte_offset, base_regno, final_regno;
  rtx reg = SUBREG_REG (x);

  /* This is where we attempt to catch illegal subregs
     created by the compiler.  */
  if (GET_CODE (x) != SUBREG
      || GET_CODE (reg) != REG)
    abort ();
  base_regno = REGNO (reg);
  if (base_regno >= FIRST_PSEUDO_REGISTER)
    abort ();
  if (check_mode && ! HARD_REGNO_MODE_OK (base_regno, GET_MODE (reg)))
    abort ();

  /* Catch non-congruent offsets too.  */
  byte_offset = SUBREG_BYTE (x);
  if ((byte_offset % GET_MODE_SIZE (mode)) != 0)
    abort ();

  final_regno = subreg_regno (x);

  return final_regno;
}
/* Return a value representing some low-order bits of X, where the number
   of low-order bits is given by MODE.  Note that no conversion is done
   between floating-point and fixed-point values, rather, the bit
   representation is returned.

   This function handles the cases in common between gen_lowpart, below,
   and two variants in cse.c and combine.c.  These are the cases that can
   be safely handled at all points in the compilation.

   If this is not a case we can handle, return 0.  */

rtx
gen_lowpart_common (mode, x)
     enum machine_mode mode;
     rtx x;
{
  int msize = GET_MODE_SIZE (mode);
  int xsize = GET_MODE_SIZE (GET_MODE (x));
  int offset = 0;

  if (GET_MODE (x) == mode)
    return x;

  /* MODE must occupy no more words than the mode of X.  */
  if (GET_MODE (x) != VOIDmode
      && ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
	  > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
    return 0;

  offset = subreg_lowpart_offset (mode, GET_MODE (x));

  if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
      && (GET_MODE_CLASS (mode) == MODE_INT
	  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
    {
      /* If we are getting the low-order part of something that has been
	 sign- or zero-extended, we can either just use the object being
	 extended or make a narrower extension.  If we want an even smaller
	 piece than the size of the object being extended, call ourselves
	 recursively.

	 This case is used mostly by combine and cse.  */

      if (GET_MODE (XEXP (x, 0)) == mode)
	return XEXP (x, 0);
      else if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
	return gen_lowpart_common (mode, XEXP (x, 0));
      else if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (x)))
	return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
    }
  else if (GET_CODE (x) == SUBREG || GET_CODE (x) == REG
	   || GET_CODE (x) == CONCAT)
    return simplify_gen_subreg (mode, x, GET_MODE (x), offset);
  /* If X is a CONST_INT or a CONST_DOUBLE, extract the appropriate bits
     from the low-order part of the constant.  */
  else if ((GET_MODE_CLASS (mode) == MODE_INT
	    || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
	   && GET_MODE (x) == VOIDmode
	   && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
    {
      /* If MODE is twice the host word size, X is already the desired
	 representation.  Otherwise, if MODE is wider than a word, we can't
	 do this.  If MODE is exactly a word, return just one CONST_INT.  */

      if (GET_MODE_BITSIZE (mode) >= 2 * HOST_BITS_PER_WIDE_INT)
	return x;
      else if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
	return 0;
      else if (GET_MODE_BITSIZE (mode) == HOST_BITS_PER_WIDE_INT)
	return (GET_CODE (x) == CONST_INT ? x
		: GEN_INT (CONST_DOUBLE_LOW (x)));
      else
	{
	  /* MODE must be narrower than HOST_BITS_PER_WIDE_INT.  */
	  HOST_WIDE_INT val = (GET_CODE (x) == CONST_INT ? INTVAL (x)
			       : CONST_DOUBLE_LOW (x));

	  /* Sign extend to HOST_WIDE_INT.  */
	  val = trunc_int_for_mode (val, mode);

	  return (GET_CODE (x) == CONST_INT && INTVAL (x) == val ? x
		  : GEN_INT (val));
	}
    }

  /* The floating-point emulator can handle all conversions between
     FP and integer operands.  This simplifies reload because it
     doesn't have to deal with constructs like (subreg:DI
     (const_double:SF ...)) or (subreg:DF (const_int ...)).  */
  /* Single-precision floats are always 32-bits and double-precision
     floats are always 64-bits.  */

  else if (GET_MODE_CLASS (mode) == MODE_FLOAT
	   && GET_MODE_BITSIZE (mode) == 32
	   && GET_CODE (x) == CONST_INT)
    {
      REAL_VALUE_TYPE r;
      HOST_WIDE_INT i;

      i = INTVAL (x);
      r = REAL_VALUE_FROM_TARGET_SINGLE (i);
      return CONST_DOUBLE_FROM_REAL_VALUE (r, mode);
    }
  else if (GET_MODE_CLASS (mode) == MODE_FLOAT
	   && GET_MODE_BITSIZE (mode) == 64
	   && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE)
	   && GET_MODE (x) == VOIDmode)
    {
      REAL_VALUE_TYPE r;
      HOST_WIDE_INT i[2];
      HOST_WIDE_INT low, high;

      if (GET_CODE (x) == CONST_INT)
	{
	  low = INTVAL (x);
	  high = low >> (HOST_BITS_PER_WIDE_INT - 1);
	}
      else
	{
	  low = CONST_DOUBLE_LOW (x);
	  high = CONST_DOUBLE_HIGH (x);
	}

#if HOST_BITS_PER_WIDE_INT == 32
      /* REAL_VALUE_TARGET_DOUBLE takes the addressing order of the
	 target machine.  */
      if (WORDS_BIG_ENDIAN)
	i[0] = high, i[1] = low;
      else
	i[0] = low, i[1] = high;
#else
      i[0] = low;
#endif

      r = REAL_VALUE_FROM_TARGET_DOUBLE (i);
      return CONST_DOUBLE_FROM_REAL_VALUE (r, mode);
    }
  else if ((GET_MODE_CLASS (mode) == MODE_INT
	    || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
	   && GET_CODE (x) == CONST_DOUBLE
	   && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
    {
      REAL_VALUE_TYPE r;
      long i[4];  /* Only the low 32 bits of each 'long' are used.  */
      int endian = WORDS_BIG_ENDIAN ? 1 : 0;

      /* Convert 'r' into an array of four 32-bit words in target word
	 order.  */
      REAL_VALUE_FROM_CONST_DOUBLE (r, x);
      switch (GET_MODE_BITSIZE (GET_MODE (x)))
	{
	case 32:
	  REAL_VALUE_TO_TARGET_SINGLE (r, i[3 * endian]);
	  i[1] = 0;
	  i[2] = 0;
	  i[3 - 3 * endian] = 0;
	  break;
	case 64:
	  REAL_VALUE_TO_TARGET_DOUBLE (r, i + 2 * endian);
	  i[2 - 2 * endian] = 0;
	  i[3 - 2 * endian] = 0;
	  break;
	case 96:
	  REAL_VALUE_TO_TARGET_LONG_DOUBLE (r, i + endian);
	  i[3 - 3 * endian] = 0;
	  break;
	case 128:
	  REAL_VALUE_TO_TARGET_LONG_DOUBLE (r, i);
	  break;
	default:
	  abort ();
	}
      /* Now, pack the 32-bit elements of the array into a CONST_DOUBLE
	 and return it.  */
#if HOST_BITS_PER_WIDE_INT == 32
      return immed_double_const (i[3 * endian], i[1 + endian], mode);
#else
      if (HOST_BITS_PER_WIDE_INT != 64)
	abort ();

      return immed_double_const ((((unsigned long) i[3 * endian])
				  | ((HOST_WIDE_INT) i[1 + endian] << 32)),
				 (((unsigned long) i[2 - endian])
				  | ((HOST_WIDE_INT) i[3 - 3 * endian] << 32)),
				 mode);
#endif
    }

  /* Otherwise, we can't do this.  */
  return 0;
}
/* Return the real part (which has mode MODE) of a complex value X.
   This always comes at the low address in memory.  */

rtx
gen_realpart (mode, x)
     enum machine_mode mode;
     rtx x;
{
  if (WORDS_BIG_ENDIAN
      && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
      && REG_P (x)
      && REGNO (x) < FIRST_PSEUDO_REGISTER)
    internal_error
      ("can't access real part of complex value in hard register");
  else if (WORDS_BIG_ENDIAN)
    return gen_highpart (mode, x);
  else
    return gen_lowpart (mode, x);
}

/* Return the imaginary part (which has mode MODE) of a complex value X.
   This always comes at the high address in memory.  */

rtx
gen_imagpart (mode, x)
     enum machine_mode mode;
     rtx x;
{
  if (WORDS_BIG_ENDIAN)
    return gen_lowpart (mode, x);
  else if (! WORDS_BIG_ENDIAN
	   && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
	   && REG_P (x)
	   && REGNO (x) < FIRST_PSEUDO_REGISTER)
    internal_error
      ("can't access imaginary part of complex value in hard register");
  else
    return gen_highpart (mode, x);
}

/* Return 1 iff X, assumed to be a SUBREG,
   refers to the real part of the complex value in its containing reg.
   Complex values are always stored with the real part in the first word,
   regardless of WORDS_BIG_ENDIAN.  */

int
subreg_realpart_p (x)
     rtx x;
{
  if (GET_CODE (x) != SUBREG)
    abort ();

  return ((unsigned int) SUBREG_BYTE (x)
	  < GET_MODE_UNIT_SIZE (GET_MODE (SUBREG_REG (x))));
}
/* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a value,
   return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
   least-significant part of X.
   MODE specifies how big a part of X to return;
   it usually should not be larger than a word.
   If X is a MEM whose address is a QUEUED, the value may be so also.  */

rtx
gen_lowpart (mode, x)
     enum machine_mode mode;
     rtx x;
{
  rtx result = gen_lowpart_common (mode, x);

  if (result)
    return result;
  else if (GET_CODE (x) == REG)
    {
      /* Must be a hard reg that's not valid in MODE.  */
      result = gen_lowpart_common (mode, copy_to_reg (x));
      if (result == 0)
	abort ();
      return result;
    }
  else if (GET_CODE (x) == MEM)
    {
      /* The only additional case we can do is MEM.  */
      int offset = 0;
      if (WORDS_BIG_ENDIAN)
	offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
		  - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));

      if (BYTES_BIG_ENDIAN)
	/* Adjust the address so that the address-after-the-data
	   is unchanged.  */
	offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
		   - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));

      return adjust_address (x, mode, offset);
    }
  else if (GET_CODE (x) == ADDRESSOF)
    return gen_lowpart (mode, force_reg (GET_MODE (x), x));
  else
    abort ();
}
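/* Constant case in action (a sketch): extracting the low byte of an
   integer constant goes through gen_lowpart_common's CONST_INT path:

	gen_lowpart (QImode, GEN_INT (0x1234))  =>  (const_int 0x34)

   i.e. the low-order bits are truncated and re-sign-extended for QImode
   by trunc_int_for_mode.  */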
/* Like `gen_lowpart', but refer to the most significant part.
   This is used to access the imaginary part of a complex number.  */

rtx
gen_highpart (mode, x)
     enum machine_mode mode;
     rtx x;
{
  unsigned int msize = GET_MODE_SIZE (mode);
  rtx result;

  /* This case loses if X is a subreg.  To catch bugs early,
     complain if an invalid MODE is used even in other cases.  */
  if (msize > UNITS_PER_WORD
      && msize != GET_MODE_UNIT_SIZE (GET_MODE (x)))
    abort ();

  result = simplify_gen_subreg (mode, x, GET_MODE (x),
				subreg_highpart_offset (mode, GET_MODE (x)));

  /* simplify_gen_subreg is not guaranteed to return a valid operand for
     the target if we have a MEM.  gen_highpart must return a valid operand,
     emitting code if necessary to do so.  */
  if (result != NULL_RTX && GET_CODE (result) == MEM)
    result = validize_mem (result);

  if (!result)
    abort ();
  return result;
}

/* Like gen_highpart, but accept the mode of the EXP operand in case EXP
   can be a VOIDmode constant.  */
rtx
gen_highpart_mode (outermode, innermode, exp)
     enum machine_mode outermode, innermode;
     rtx exp;
{
  if (GET_MODE (exp) != VOIDmode)
    {
      if (GET_MODE (exp) != innermode)
	abort ();
      return gen_highpart (outermode, exp);
    }
  return simplify_gen_subreg (outermode, exp, innermode,
			      subreg_highpart_offset (outermode, innermode));
}
/* Return offset in bytes to get OUTERMODE low part
   of the value in mode INNERMODE stored in memory in target format.  */

unsigned int
subreg_lowpart_offset (outermode, innermode)
     enum machine_mode outermode, innermode;
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  return offset;
}

/* Return offset in bytes to get OUTERMODE high part
   of the value in mode INNERMODE stored in memory in target format.  */
unsigned int
subreg_highpart_offset (outermode, innermode)
     enum machine_mode outermode, innermode;
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  if (GET_MODE_SIZE (innermode) < GET_MODE_SIZE (outermode))
    abort ();

  if (difference > 0)
    {
      if (! WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (! BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  return offset;
}
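/* Worked example for both routines, assuming 4-byte UNITS_PER_WORD,
   INNERMODE = DImode (8 bytes) and OUTERMODE = SImode (4 bytes), so
   difference = 4:

	little endian:  lowpart offset = 0,  highpart offset = 4
	big endian:     lowpart offset = 4,  highpart offset = 0

   i.e. the low part is whichever word holds the least significant bits,
   wherever the target's endianness places it.  */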
/* Return 1 iff X, assumed to be a SUBREG,
   refers to the least significant part of its containing reg.
   If X is not a SUBREG, always return 1 (it is its own low part!).  */

int
subreg_lowpart_p (x)
     rtx x;
{
  if (GET_CODE (x) != SUBREG)
    return 1;
  else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
    return 0;

  return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
	  == SUBREG_BYTE (x));
}
/* Helper routine for all the constant cases of operand_subword.
   Some places invoke this directly.  */

rtx
constant_subword (op, offset, mode)
     rtx op;
     int offset;
     enum machine_mode mode;
{
  int size_ratio = HOST_BITS_PER_WIDE_INT / BITS_PER_WORD;
  HOST_WIDE_INT val;

  /* If OP is already an integer word, return it.  */
  if (GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_SIZE (mode) == UNITS_PER_WORD)
    return op;

  /* The output is some bits, the width of the target machine's word.
     A wider-word host can surely hold them in a CONST_INT.  A narrower-word
     host can't.  */
  if (HOST_BITS_PER_WIDE_INT >= BITS_PER_WORD
      && GET_MODE_CLASS (mode) == MODE_FLOAT
      && GET_MODE_BITSIZE (mode) == 64
      && GET_CODE (op) == CONST_DOUBLE)
    {
      long k[2];
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_DOUBLE (rv, k);

      /* We handle 32-bit and >= 64-bit words here.  Note that the order in
	 which the words are written depends on the word endianness.
	 ??? This is a potential portability problem and should
	 be fixed at some point.

	 We must exercise caution with the sign bit.  By definition there
	 are 32 significant bits in K; there may be more in a HOST_WIDE_INT.
	 Consider a host with a 32-bit long and a 64-bit HOST_WIDE_INT.
	 So we explicitly mask and sign-extend as necessary.  */
      if (BITS_PER_WORD == 32)
	{
	  val = k[offset];
	  val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;
	  return GEN_INT (val);
	}
#if HOST_BITS_PER_WIDE_INT >= 64
      else if (BITS_PER_WORD >= 64 && offset == 0)
	{
	  val = k[! WORDS_BIG_ENDIAN];
	  val = (((val & 0xffffffff) ^ 0x80000000) - 0x80000000) << 32;
	  val |= (HOST_WIDE_INT) k[WORDS_BIG_ENDIAN] & 0xffffffff;
	  return GEN_INT (val);
	}
#endif
      else if (BITS_PER_WORD == 16)
	{
	  val = k[offset >> 1];
	  if ((offset & 1) == ! WORDS_BIG_ENDIAN)
	    val >>= 16;
	  val = ((val & 0xffff) ^ 0x8000) - 0x8000;
	  return GEN_INT (val);
	}
      else
	abort ();
    }
  else if (HOST_BITS_PER_WIDE_INT >= BITS_PER_WORD
	   && GET_MODE_CLASS (mode) == MODE_FLOAT
	   && GET_MODE_BITSIZE (mode) > 64
	   && GET_CODE (op) == CONST_DOUBLE)
    {
      long k[4];
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);

      if (BITS_PER_WORD == 32)
	{
	  val = k[offset];
	  val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;
	  return GEN_INT (val);
	}
#if HOST_BITS_PER_WIDE_INT >= 64
      else if (BITS_PER_WORD >= 64 && offset <= 1)
	{
	  val = k[offset * 2 + ! WORDS_BIG_ENDIAN];
	  val = (((val & 0xffffffff) ^ 0x80000000) - 0x80000000) << 32;
	  val |= (HOST_WIDE_INT) k[offset * 2 + WORDS_BIG_ENDIAN] & 0xffffffff;
	  return GEN_INT (val);
	}
#endif
      else
	abort ();
    }

  /* Single word float is a little harder, since single- and double-word
     values often do not have the same high-order bits.  We have already
     verified that we want the only defined word of the single-word value.  */
  if (GET_MODE_CLASS (mode) == MODE_FLOAT
      && GET_MODE_BITSIZE (mode) == 32
      && GET_CODE (op) == CONST_DOUBLE)
    {
      long l;
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_SINGLE (rv, l);

      /* Sign extend from known 32-bit value to HOST_WIDE_INT.  */
      val = l;
      val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;

      if (BITS_PER_WORD == 16)
	{
	  if ((offset & 1) == ! WORDS_BIG_ENDIAN)
	    val >>= 16;
	  val = ((val & 0xffff) ^ 0x8000) - 0x8000;
	}

      return GEN_INT (val);
    }

  /* The only remaining cases that we can handle are integers.
     Convert to proper endianness now since these cases need it.
     At this point, offset == 0 means the low-order word.

     We do not want to handle the case when BITS_PER_WORD <= HOST_BITS_PER_INT
     in general.  However, if OP is (const_int 0), we can just return
     it for any word.  */

  if (op == const0_rtx)
    return op;

  if (GET_MODE_CLASS (mode) != MODE_INT
      || (GET_CODE (op) != CONST_INT && GET_CODE (op) != CONST_DOUBLE)
      || BITS_PER_WORD > HOST_BITS_PER_WIDE_INT)
    return 0;

  if (WORDS_BIG_ENDIAN)
    offset = GET_MODE_SIZE (mode) / UNITS_PER_WORD - 1 - offset;

  /* Find out which word on the host machine this value is in and get
     it from the constant.  */
  val = (offset / size_ratio == 0
	 ? (GET_CODE (op) == CONST_INT ? INTVAL (op) : CONST_DOUBLE_LOW (op))
	 : (GET_CODE (op) == CONST_INT
	    ? (INTVAL (op) < 0 ? ~0 : 0) : CONST_DOUBLE_HIGH (op)));

  /* Get the value we want into the low bits of val.  */
  if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT)
    val = ((val >> ((offset % size_ratio) * BITS_PER_WORD)));

  val = trunc_int_for_mode (val, word_mode);

  return GEN_INT (val);
}
/* Return subword OFFSET of operand OP.
   The word number, OFFSET, is interpreted as the word number starting
   at the low-order address.  OFFSET 0 is the low-order word if not
   WORDS_BIG_ENDIAN, otherwise it is the high-order word.

   If we cannot extract the required word, we return zero.  Otherwise,
   an rtx corresponding to the requested word will be returned.

   VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
   reload has completed, a valid address will always be returned.  After
   reload, if a valid address cannot be returned, we return zero.

   If VALIDATE_ADDRESS is zero, we simply form the required address; validating
   it is the responsibility of the caller.

   MODE is the mode of OP in case it is a CONST_INT.

   ??? This is still rather broken for some cases.  The problem for the
   moment is that all callers of this thing provide no 'goal mode' to
   tell us to work with.  This exists because all callers were written
   in a word based SUBREG world.
   Now use of this function can be deprecated by simplify_subreg in most
   cases.  */

rtx
operand_subword (op, offset, validate_address, mode)
     rtx op;
     unsigned int offset;
     int validate_address;
     enum machine_mode mode;
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  if (mode == VOIDmode)
    abort ();

  /* If OP is narrower than a word, fail.  */
  if (mode != BLKmode
      && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
    return 0;

  /* If we want a word outside OP, return zero.  */
  if (mode != BLKmode
      && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
    return const0_rtx;

  /* Form a new MEM at the requested address.  */
  if (GET_CODE (op) == MEM)
    {
      rtx new = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);

      if (! validate_address)
	return new;

      else if (reload_completed)
	{
	  if (! strict_memory_address_p (word_mode, XEXP (new, 0)))
	    return 0;
	}
      else
	return replace_equiv_address (new, XEXP (new, 0));
    }

  /* Rest can be handled by simplify_subreg.  */
  return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
}
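/* A sketch of typical use, assuming a 32-bit word target (so word_mode is
   SImode) and a DImode register operand REG:

	operand_subword (reg, 0, 1, DImode)
	operand_subword (reg, 1, 1, DImode)

   yield the two SImode words at word offsets 0 and 1.  Word 0 is the one
   at the lowest address: the least significant word if !WORDS_BIG_ENDIAN,
   the most significant one otherwise.  */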
/* Similar to `operand_subword', but never return 0.  If we can't extract
   the required subword, put OP into a register and try again.  If that fails,
   abort.  We always validate the address in this case.

   MODE is the mode of OP, in case it is CONST_INT.  */

rtx
operand_subword_force (op, offset, mode)
     rtx op;
     unsigned int offset;
     enum machine_mode mode;
{
  rtx result = operand_subword (op, offset, 1, mode);

  if (result)
    return result;

  if (mode != BLKmode && mode != VOIDmode)
    {
      /* If this is a register which can not be accessed by words, copy it
	 to a pseudo register.  */
      if (GET_CODE (op) == REG)
	op = copy_to_reg (op);
      else
	op = force_reg (mode, op);
    }

  result = operand_subword (op, offset, 1, mode);
  if (result == 0)
    abort ();

  return result;
}
/* Given a compare instruction, swap the operands.
   A test instruction is changed into a compare of 0 against the operand.  */

void
reverse_comparison (insn)
     rtx insn;
{
  rtx body = PATTERN (insn);
  rtx comp;

  if (GET_CODE (body) == SET)
    comp = SET_SRC (body);
  else
    comp = SET_SRC (XVECEXP (body, 0, 0));

  if (GET_CODE (comp) == COMPARE)
    {
      rtx op0 = XEXP (comp, 0);
      rtx op1 = XEXP (comp, 1);
      XEXP (comp, 0) = op1;
      XEXP (comp, 1) = op0;
    }
  else
    {
      rtx new = gen_rtx_COMPARE (VOIDmode,
				 CONST0_RTX (GET_MODE (comp)), comp);
      if (GET_CODE (body) == SET)
	SET_SRC (body) = new;
      else
	SET_SRC (XVECEXP (body, 0, 0)) = new;
    }
}
/* Within a MEM_EXPR, we care about either (1) a component ref of a decl,
   or (2) a component ref of something variable.  Represent the latter with
   a NULL expression.  */

static tree
component_ref_for_mem_expr (ref)
     tree ref;
{
  tree inner = TREE_OPERAND (ref, 0);

  if (TREE_CODE (inner) == COMPONENT_REF)
    inner = component_ref_for_mem_expr (inner);
  else
    {
      tree placeholder_ptr = 0;

      /* Now remove any conversions: they don't change what the underlying
	 object is.  Likewise for SAVE_EXPR.  Also handle PLACEHOLDER_EXPR.  */
      while (TREE_CODE (inner) == NOP_EXPR || TREE_CODE (inner) == CONVERT_EXPR
	     || TREE_CODE (inner) == NON_LVALUE_EXPR
	     || TREE_CODE (inner) == VIEW_CONVERT_EXPR
	     || TREE_CODE (inner) == SAVE_EXPR
	     || TREE_CODE (inner) == PLACEHOLDER_EXPR)
	if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
	  inner = find_placeholder (inner, &placeholder_ptr);
	else
	  inner = TREE_OPERAND (inner, 0);

      if (! DECL_P (inner))
	inner = NULL_TREE;
    }

  if (inner == TREE_OPERAND (ref, 0))
    return ref;
  else
    return build (COMPONENT_REF, TREE_TYPE (ref), inner,
		  TREE_OPERAND (ref, 1));
}
/* Given REF, a MEM, and T, either the type of REF or the expression
   corresponding to REF, set the memory attributes.  OBJECTP is nonzero
   if we are making a new object of this type.  */

void
set_mem_attributes (ref, t, objectp)
     rtx ref;
     tree t;
     int objectp;
{
  HOST_WIDE_INT alias = MEM_ALIAS_SET (ref);
  tree expr = MEM_EXPR (ref);
  rtx offset = MEM_OFFSET (ref);
  rtx size = MEM_SIZE (ref);
  unsigned int align = MEM_ALIGN (ref);
  tree type;

  /* It can happen that type_for_mode was given a mode for which there
     is no language-level type.  In which case it returns NULL, which
     we can see here.  */
  if (t == NULL_TREE)
    return;

  type = TYPE_P (t) ? t : TREE_TYPE (t);

  /* If we have already set DECL_RTL = ref, get_alias_set will get the
     wrong answer, as it assumes that DECL_RTL already has the right alias
     info.  Callers should not set DECL_RTL until after the call to
     set_mem_attributes.  */
  if (DECL_P (t) && ref == DECL_RTL_IF_SET (t))
    abort ();

  /* Get the alias set from the expression or type (perhaps using a
     front-end routine) and use it.  */
  alias = get_alias_set (t);

  MEM_VOLATILE_P (ref) = TYPE_VOLATILE (type);
  MEM_IN_STRUCT_P (ref) = AGGREGATE_TYPE_P (type);
  RTX_UNCHANGING_P (ref)
    |= ((lang_hooks.honor_readonly
	 && (TYPE_READONLY (type) || TREE_READONLY (t)))
	|| (! TYPE_P (t) && TREE_CONSTANT (t)));

  /* If we are making an object of this type, or if this is a DECL, we know
     that it is a scalar if the type is not an aggregate.  */
  if ((objectp || DECL_P (t)) && ! AGGREGATE_TYPE_P (type))
    MEM_SCALAR_P (ref) = 1;

  /* We can set the alignment from the type if we are making an object,
     this is an INDIRECT_REF, or if TYPE_ALIGN_OK.  */
  if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
    align = MAX (align, TYPE_ALIGN (type));

  /* If the size is known, we can set that.  */
  if (TYPE_SIZE_UNIT (type) && host_integerp (TYPE_SIZE_UNIT (type), 1))
    size = GEN_INT (tree_low_cst (TYPE_SIZE_UNIT (type), 1));

  /* If T is not a type, we may be able to deduce some more information about
     the expression.  */
  if (! TYPE_P (t))
    {
      maybe_set_unchanging (ref, t);
      if (TREE_THIS_VOLATILE (t))
	MEM_VOLATILE_P (ref) = 1;

      /* Now remove any conversions: they don't change what the underlying
	 object is.  Likewise for SAVE_EXPR.  */
      while (TREE_CODE (t) == NOP_EXPR || TREE_CODE (t) == CONVERT_EXPR
	     || TREE_CODE (t) == NON_LVALUE_EXPR
	     || TREE_CODE (t) == VIEW_CONVERT_EXPR
	     || TREE_CODE (t) == SAVE_EXPR)
	t = TREE_OPERAND (t, 0);

      /* If this expression can't be addressed (e.g., it contains a reference
	 to a non-addressable field), show we don't change its alias set.  */
      if (! can_address_p (t))
	MEM_KEEP_ALIAS_SET_P (ref) = 1;

      /* If this is a decl, set the attributes of the MEM from it.  */
      if (DECL_P (t))
	{
	  expr = t;
	  offset = const0_rtx;
	  size = (DECL_SIZE_UNIT (t)
		  && host_integerp (DECL_SIZE_UNIT (t), 1)
		  ? GEN_INT (tree_low_cst (DECL_SIZE_UNIT (t), 1)) : 0);
	  align = DECL_ALIGN (t);
	}

      /* If this is a constant, we know the alignment.  */
      else if (TREE_CODE_CLASS (TREE_CODE (t)) == 'c')
	{
	  align = TYPE_ALIGN (type);
#ifdef CONSTANT_ALIGNMENT
	  align = CONSTANT_ALIGNMENT (t, align);
#endif
	}

      /* If this is a field reference and not a bit-field, record it.  */
      /* ??? There is some information that can be gleaned from bit-fields,
	 such as the word offset in the structure that might be modified.
	 But skip it for now.  */
      else if (TREE_CODE (t) == COMPONENT_REF
	       && ! DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
	{
	  expr = component_ref_for_mem_expr (t);
	  offset = const0_rtx;
	  /* ??? Any reason the field size would be different than
	     the size we got from the type?  */
	}

      /* If this is an array reference, look for an outer field reference.  */
      else if (TREE_CODE (t) == ARRAY_REF)
	{
	  tree off_tree = size_zero_node;

	  do
	    {
	      off_tree
		= fold (build (PLUS_EXPR, sizetype,
			       fold (build (MULT_EXPR, sizetype,
					    TREE_OPERAND (t, 1),
					    TYPE_SIZE_UNIT (TREE_TYPE (t)))),
			       off_tree));
	      t = TREE_OPERAND (t, 0);
	    }
	  while (TREE_CODE (t) == ARRAY_REF);

	  if (TREE_CODE (t) == COMPONENT_REF)
	    {
	      expr = component_ref_for_mem_expr (t);
	      if (host_integerp (off_tree, 1))
		offset = GEN_INT (tree_low_cst (off_tree, 1));
	      /* ??? Any reason the field size would be different than
		 the size we got from the type?  */
	    }
	}
    }

  /* Now set the attributes we computed above.  */
  MEM_ATTRS (ref)
    = get_mem_attrs (alias, expr, offset, size, align, GET_MODE (ref));

  /* If this is already known to be a scalar or aggregate, we are done.  */
  if (MEM_IN_STRUCT_P (ref) || MEM_SCALAR_P (ref))
    return;

  /* If it is a reference into an aggregate, this is part of an aggregate.
     Otherwise we don't know.  */
  else if (TREE_CODE (t) == COMPONENT_REF || TREE_CODE (t) == ARRAY_REF
	   || TREE_CODE (t) == ARRAY_RANGE_REF
	   || TREE_CODE (t) == BIT_FIELD_REF)
    MEM_IN_STRUCT_P (ref) = 1;
}
/* Set the alias set of MEM to SET.  */

void
set_mem_alias_set (mem, set)
     rtx mem;
     HOST_WIDE_INT set;
{
#ifdef ENABLE_CHECKING
  /* If the new and old alias sets don't conflict, something is wrong.  */
  if (!alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)))
    abort ();
#endif

  MEM_ATTRS (mem) = get_mem_attrs (set, MEM_EXPR (mem), MEM_OFFSET (mem),
				   MEM_SIZE (mem), MEM_ALIGN (mem),
				   GET_MODE (mem));
}

/* Set the alignment of MEM to ALIGN bits.  */

void
set_mem_align (mem, align)
     rtx mem;
     unsigned int align;
{
  MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
				   MEM_OFFSET (mem), MEM_SIZE (mem), align,
				   GET_MODE (mem));
}

/* Set the expr for MEM to EXPR.  */

void
set_mem_expr (mem, expr)
     rtx mem;
     tree expr;
{
  MEM_ATTRS (mem)
    = get_mem_attrs (MEM_ALIAS_SET (mem), expr, MEM_OFFSET (mem),
		     MEM_SIZE (mem), MEM_ALIGN (mem), GET_MODE (mem));
}

/* Set the offset of MEM to OFFSET.  */

void
set_mem_offset (mem, offset)
     rtx mem, offset;
{
  MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
				   offset, MEM_SIZE (mem), MEM_ALIGN (mem),
				   GET_MODE (mem));
}
/* Return a memory reference like MEMREF, but with its mode changed to MODE
   and its address changed to ADDR.  (VOIDmode means don't change the mode.
   NULL for ADDR means don't change the address.)  VALIDATE is nonzero if the
   returned memory location is required to be valid.  The memory
   attributes are not changed.  */

static rtx
change_address_1 (memref, mode, addr, validate)
     rtx memref;
     enum machine_mode mode;
     rtx addr;
     int validate;
{
  rtx new;

  if (GET_CODE (memref) != MEM)
    abort ();
  if (mode == VOIDmode)
    mode = GET_MODE (memref);
  if (addr == 0)
    addr = XEXP (memref, 0);

  if (validate)
    {
      if (reload_in_progress || reload_completed)
	{
	  if (! memory_address_p (mode, addr))
	    abort ();
	}
      else
	addr = memory_address (mode, addr);
    }

  if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
    return memref;

  new = gen_rtx_MEM (mode, addr);
  MEM_COPY_ATTRIBUTES (new, memref);
  return new;
}
1937 /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
1938 way we are changing MEMREF, so we only preserve the alias set. */
1941 change_address (memref, mode, addr)
1942 rtx memref;
1943 enum machine_mode mode;
1944 rtx addr;
1946 rtx new = change_address_1 (memref, mode, addr, 1);
1947 enum machine_mode mmode = GET_MODE (new);
1949 MEM_ATTRS (new)
1950 = get_mem_attrs (MEM_ALIAS_SET (memref), 0, 0,
1951 mmode == BLKmode ? 0 : GEN_INT (GET_MODE_SIZE (mmode)),
1952 (mmode == BLKmode ? BITS_PER_UNIT
1953 : GET_MODE_ALIGNMENT (mmode)),
1954 mmode);
1956 return new;
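/* For instance, re-addressing a BLKmode block one word at a time
   might look like this sketch (BLK_MEM and ADDR are illustrative):

     rtx word = change_address (blk_mem, SImode, addr);

   Only the alias set survives into WORD; the expr and offset are
   dropped and the size and alignment are recomputed from the new
   mode, which is why the adjust_address_1 family below is preferred
   when the new address is a known offset from the old one. */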
1959 /* Return a memory reference like MEMREF, but with its mode changed
1960 to MODE and its address offset by OFFSET bytes. If VALIDATE is
1961 nonzero, the memory address is forced to be valid.
1962 If ADJUST is zero, OFFSET is only used to update MEM_ATTRS
1963 and the caller is responsible for adjusting the MEMREF base register. */
1966 adjust_address_1 (memref, mode, offset, validate, adjust)
1967 rtx memref;
1968 enum machine_mode mode;
1969 HOST_WIDE_INT offset;
1970 int validate, adjust;
1972 rtx addr = XEXP (memref, 0);
1973 rtx new;
1974 rtx memoffset = MEM_OFFSET (memref);
1975 rtx size = 0;
1976 unsigned int memalign = MEM_ALIGN (memref);
1978 /* ??? Prefer to create garbage instead of creating shared rtl.
1979 This may happen even if offset is non-zero -- consider
1980 (plus (plus reg reg) const_int) -- so do this always. */
1981 addr = copy_rtx (addr);
1983 if (adjust)
1985 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
1986 object, we can merge it into the LO_SUM. */
1987 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
1988 && offset >= 0
1989 && (unsigned HOST_WIDE_INT) offset
1990 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
1991 addr = gen_rtx_LO_SUM (Pmode, XEXP (addr, 0),
1992 plus_constant (XEXP (addr, 1), offset));
1993 else
1994 addr = plus_constant (addr, offset);
1997 new = change_address_1 (memref, mode, addr, validate);
1999 /* Compute the new values of the memory attributes due to this adjustment.
2000 We add the offsets and update the alignment. */
2001 if (memoffset)
2002 memoffset = GEN_INT (offset + INTVAL (memoffset));
2004 /* Compute the new alignment by taking the MIN of the alignment and the
2005 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
2006 is zero. */
2007 if (offset != 0)
2008 memalign
2009 = MIN (memalign,
2010 (unsigned HOST_WIDE_INT) (offset & -offset) * BITS_PER_UNIT);
2012 /* We can compute the size in a number of ways. */
2013 if (GET_MODE (new) != BLKmode)
2014 size = GEN_INT (GET_MODE_SIZE (GET_MODE (new)));
2015 else if (MEM_SIZE (memref))
2016 size = plus_constant (MEM_SIZE (memref), -offset);
2018 MEM_ATTRS (new) = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref),
2019 memoffset, size, memalign, GET_MODE (new));
2021 /* At some point, we should validate that this offset is within the object,
2022 if all the appropriate values are known. */
2023 return new;
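/* For example, fetching the word at byte offset 4 of a DImode MEM
   (a sketch; MEM is illustrative, and callers normally go through
   the adjust_address macro in expr.h, which passes VALIDATE = 1 and
   ADJUST = 1):

     rtx word1 = adjust_address_1 (mem, SImode, 4, 1, 1);

   The recorded offset, size and alignment of WORD1 all reflect the
   4-byte displacement. */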
2026 /* Return a memory reference like MEMREF, but with its mode changed
2027 to MODE and its address changed to ADDR, which is assumed to be
2028 MEMREF offset by OFFSET bytes. If VALIDATE is
2029 nonzero, the memory address is forced to be valid. */
2032 adjust_automodify_address_1 (memref, mode, addr, offset, validate)
2033 rtx memref;
2034 enum machine_mode mode;
2035 rtx addr;
2036 HOST_WIDE_INT offset;
2037 int validate;
2039 memref = change_address_1 (memref, VOIDmode, addr, validate);
2040 return adjust_address_1 (memref, mode, offset, validate, 0);
2043 /* Return a memory reference like MEMREF, but whose address is changed by
2044 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2045 known to be in OFFSET (possibly 1). */
2048 offset_address (memref, offset, pow2)
2049 rtx memref;
2050 rtx offset;
2051 HOST_WIDE_INT pow2;
2053 rtx new, addr = XEXP (memref, 0);
2055 new = simplify_gen_binary (PLUS, Pmode, addr, offset);
2057 /* At this point we don't know _why_ the address is invalid. It
2058 could have secondary memory references, multiplies or anything.
2060 However, if we did go and rearrange things, we can wind up not
2061 being able to recognize the magic around pic_offset_table_rtx.
2062 This stuff is fragile, and is yet another example of why it is
2063 bad to expose PIC machinery too early. */
2064 if (! memory_address_p (GET_MODE (memref), new)
2065 && GET_CODE (addr) == PLUS
2066 && XEXP (addr, 0) == pic_offset_table_rtx)
2068 addr = force_reg (GET_MODE (addr), addr);
2069 new = simplify_gen_binary (PLUS, Pmode, addr, offset);
2072 update_temp_slot_address (XEXP (memref, 0), new);
2073 new = change_address_1 (memref, VOIDmode, new, 1);
2075 /* Update the alignment to reflect the offset. Reset the offset, which
2076 we don't know. */
2077 MEM_ATTRS (new)
2078 = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref), 0, 0,
2079 MIN (MEM_ALIGN (memref),
2080 (unsigned HOST_WIDE_INT) pow2 * BITS_PER_UNIT),
2081 GET_MODE (new));
2082 return new;
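/* For instance, indexing into a table of 4-byte entries might look
   like this sketch (MEM and INDEX_REG are illustrative):

     rtx elt = offset_address (mem, index_reg, 4);

   POW2 = 4 says the variable offset is known to be a multiple of 4,
   so the recorded alignment may be kept as high as 32 bits. */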
2085 /* Return a memory reference like MEMREF, but with its address changed to
2086 ADDR. The caller is asserting that the actual piece of memory pointed
2087 to is the same, just the form of the address is being changed, such as
2088 by putting something into a register. */
2091 replace_equiv_address (memref, addr)
2092 rtx memref;
2093 rtx addr;
2095 /* change_address_1 copies the memory attribute structure without change
2096 and that's exactly what we want here. */
2097 update_temp_slot_address (XEXP (memref, 0), addr);
2098 return change_address_1 (memref, VOIDmode, addr, 1);
2101 /* Likewise, but the reference is not required to be valid. */
2104 replace_equiv_address_nv (memref, addr)
2105 rtx memref;
2106 rtx addr;
2108 return change_address_1 (memref, VOIDmode, addr, 0);
2111 /* Return a memory reference like MEMREF, but with its mode widened to
2112 MODE and offset by OFFSET. This would be used by targets that e.g.
2113 cannot issue QImode memory operations and have to use SImode memory
2114 operations plus masking logic. */
2117 widen_memory_access (memref, mode, offset)
2118 rtx memref;
2119 enum machine_mode mode;
2120 HOST_WIDE_INT offset;
2122 rtx new = adjust_address_1 (memref, mode, offset, 1, 1);
2123 tree expr = MEM_EXPR (new);
2124 rtx memoffset = MEM_OFFSET (new);
2125 unsigned int size = GET_MODE_SIZE (mode);
2127 /* If we don't know what offset we were at within the expression, then
2128 we can't know if we've overstepped the bounds. */
2129 if (! memoffset)
2130 expr = NULL_TREE;
2132 while (expr)
2134 if (TREE_CODE (expr) == COMPONENT_REF)
2136 tree field = TREE_OPERAND (expr, 1);
2138 if (! DECL_SIZE_UNIT (field))
2140 expr = NULL_TREE;
2141 break;
2144 /* Is the field at least as large as the access? If so, ok,
2145 otherwise strip back to the containing structure. */
2146 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2147 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
2148 && INTVAL (memoffset) >= 0)
2149 break;
2151 if (! host_integerp (DECL_FIELD_OFFSET (field), 1))
2153 expr = NULL_TREE;
2154 break;
2157 expr = TREE_OPERAND (expr, 0);
2158 memoffset = (GEN_INT (INTVAL (memoffset)
2159 + tree_low_cst (DECL_FIELD_OFFSET (field), 1)
2160 + (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
2161 / BITS_PER_UNIT)));
2163 /* Similarly for the decl. */
2164 else if (DECL_P (expr)
2165 && DECL_SIZE_UNIT (expr)
2166 && compare_tree_int (DECL_SIZE_UNIT (expr), size) >= 0
2167 && (! memoffset || INTVAL (memoffset) >= 0))
2168 break;
2169 else
2171 /* The widened memory access overflows the expression, which means
2172 that it could alias another expression. Zap it. */
2173 expr = NULL_TREE;
2174 break;
2178 if (! expr)
2179 memoffset = NULL_RTX;
2181 /* The widened memory may alias other stuff, so zap the alias set. */
2182 /* ??? Maybe use get_alias_set on any remaining expression. */
2184 MEM_ATTRS (new) = get_mem_attrs (0, expr, memoffset, GEN_INT (size),
2185 MEM_ALIGN (new), mode);
2187 return new;
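/* For example, a target that can only issue SImode loads might widen
   a QImode reference and mask afterwards (a sketch; MEM is
   illustrative):

     rtx wide = widen_memory_access (mem, SImode, 0);

   Note that WIDE deliberately gets alias set 0, since the extra
   bytes may overlap neighboring objects. */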
2190 /* Return a newly created CODE_LABEL rtx with a unique label number. */
2193 gen_label_rtx ()
2195 rtx label;
2197 label = gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX,
2198 NULL_RTX, label_num++, NULL, NULL);
2200 LABEL_NUSES (label) = 0;
2201 LABEL_ALTERNATE_NAME (label) = NULL;
2202 return label;
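/* The usual pairing with emit_label, sketched:

     rtx label = gen_label_rtx ();
     ...
     emit_label (label);

   gen_label_rtx only creates the CODE_LABEL; it does not enter the
   insn chain until emit_label (or emit_label_before/_after) places
   it there. */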
2205 /* For procedure integration. */
2207 /* Install new pointers to the first and last insns in the chain.
2208 Also, set cur_insn_uid to one higher than the last in use.
2209 Used for an inline-procedure after copying the insn chain. */
2211 void
2212 set_new_first_and_last_insn (first, last)
2213 rtx first, last;
2215 rtx insn;
2217 first_insn = first;
2218 last_insn = last;
2219 cur_insn_uid = 0;
2221 for (insn = first; insn; insn = NEXT_INSN (insn))
2222 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2224 cur_insn_uid++;
2227 /* Set the range of label numbers found in the current function.
2228 This is used when belatedly compiling an inline function. */
2230 void
2231 set_new_first_and_last_label_num (first, last)
2232 int first, last;
2234 base_label_num = label_num;
2235 first_label_num = first;
2236 last_label_num = last;
2239 /* Set the last label number found in the current function.
2240 This is used when belatedly compiling an inline function. */
2242 void
2243 set_new_last_label_num (last)
2244 int last;
2246 base_label_num = label_num;
2247 last_label_num = last;
2250 /* Restore all variables describing the current status from the structure *P.
2251 This is used after a nested function. */
2253 void
2254 restore_emit_status (p)
2255 struct function *p ATTRIBUTE_UNUSED;
2257 last_label_num = 0;
2258 clear_emit_caches ();
2261 /* Clear out all parts of the state in F that can safely be discarded
2262 after the function has been compiled, to let garbage collection
2263 reclaim the memory. */
2265 void
2266 free_emit_status (f)
2267 struct function *f;
2269 free (f->emit->x_regno_reg_rtx);
2270 free (f->emit->regno_pointer_align);
2271 free (f->emit->regno_decl);
2272 free (f->emit);
2273 f->emit = NULL;
2276 /* Go through all the RTL insn bodies and copy any invalid shared
2277 structure. This routine should only be called once. */
2279 void
2280 unshare_all_rtl (fndecl, insn)
2281 tree fndecl;
2282 rtx insn;
2284 tree decl;
2286 /* Make sure that virtual parameters are not shared. */
2287 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
2288 SET_DECL_RTL (decl, copy_rtx_if_shared (DECL_RTL (decl)));
2290 /* Make sure that virtual stack slots are not shared. */
2291 unshare_all_decls (DECL_INITIAL (fndecl));
2293 /* Unshare just about everything else. */
2294 unshare_all_rtl_1 (insn);
2296 /* Make sure the addresses of stack slots found outside the insn chain
2297 (such as, in DECL_RTL of a variable) are not shared
2298 with the insn chain.
2300 This special care is necessary when the stack slot MEM does not
2301 actually appear in the insn chain. If it does appear, its address
2302 is unshared from all else at that point. */
2303 stack_slot_list = copy_rtx_if_shared (stack_slot_list);
2306 /* Go through all the RTL insn bodies and copy any invalid shared
2307 structure, again. This is a fairly expensive thing to do, so it
2308 should be done sparingly. */
2310 void
2311 unshare_all_rtl_again (insn)
2312 rtx insn;
2314 rtx p;
2315 tree decl;
2317 for (p = insn; p; p = NEXT_INSN (p))
2318 if (INSN_P (p))
2320 reset_used_flags (PATTERN (p));
2321 reset_used_flags (REG_NOTES (p));
2322 reset_used_flags (LOG_LINKS (p));
2325 /* Make sure that virtual stack slots are not shared. */
2326 reset_used_decls (DECL_INITIAL (cfun->decl));
2328 /* Make sure that virtual parameters are not shared. */
2329 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = TREE_CHAIN (decl))
2330 reset_used_flags (DECL_RTL (decl));
2332 reset_used_flags (stack_slot_list);
2334 unshare_all_rtl (cfun->decl, insn);
2337 /* Go through all the RTL insn bodies and copy any invalid shared structure.
2338 Assumes the mark bits are cleared at entry. */
2340 static void
2341 unshare_all_rtl_1 (insn)
2342 rtx insn;
2344 for (; insn; insn = NEXT_INSN (insn))
2345 if (INSN_P (insn))
2347 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2348 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2349 LOG_LINKS (insn) = copy_rtx_if_shared (LOG_LINKS (insn));
2353 /* Go through all virtual stack slots of a function and copy any
2354 shared structure. */
2355 static void
2356 unshare_all_decls (blk)
2357 tree blk;
2359 tree t;
2361 /* Copy shared decls. */
2362 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
2363 if (DECL_RTL_SET_P (t))
2364 SET_DECL_RTL (t, copy_rtx_if_shared (DECL_RTL (t)));
2366 /* Now process sub-blocks. */
2367 for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
2368 unshare_all_decls (t);
2371 /* Go through all virtual stack slots of a function and mark them as
2372 not shared. */
2373 static void
2374 reset_used_decls (blk)
2375 tree blk;
2377 tree t;
2379 /* Mark decls. */
2380 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
2381 if (DECL_RTL_SET_P (t))
2382 reset_used_flags (DECL_RTL (t));
2384 /* Now process sub-blocks. */
2385 for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
2386 reset_used_decls (t);
2389 /* Similar to `copy_rtx' except that if MAY_SHARE is present, it is
2390 placed in the result directly, rather than being copied. MAY_SHARE is
2391 either a MEM or an EXPR_LIST of MEMs. */
2394 copy_most_rtx (orig, may_share)
2395 rtx orig;
2396 rtx may_share;
2398 rtx copy;
2399 int i, j;
2400 RTX_CODE code;
2401 const char *format_ptr;
2403 if (orig == may_share
2404 || (GET_CODE (may_share) == EXPR_LIST
2405 && in_expr_list_p (may_share, orig)))
2406 return orig;
2408 code = GET_CODE (orig);
2410 switch (code)
2412 case REG:
2413 case QUEUED:
2414 case CONST_INT:
2415 case CONST_DOUBLE:
2416 case CONST_VECTOR:
2417 case SYMBOL_REF:
2418 case CODE_LABEL:
2419 case PC:
2420 case CC0:
2421 return orig;
2422 default:
2423 break;
2426 copy = rtx_alloc (code);
2427 PUT_MODE (copy, GET_MODE (orig));
2428 RTX_FLAG (copy, in_struct) = RTX_FLAG (orig, in_struct);
2429 RTX_FLAG (copy, volatil) = RTX_FLAG (orig, volatil);
2430 RTX_FLAG (copy, unchanging) = RTX_FLAG (orig, unchanging);
2431 RTX_FLAG (copy, integrated) = RTX_FLAG (orig, integrated);
2432 RTX_FLAG (copy, frame_related) = RTX_FLAG (orig, frame_related);
2434 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
2436 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
2438 switch (*format_ptr++)
2440 case 'e':
2441 XEXP (copy, i) = XEXP (orig, i);
2442 if (XEXP (orig, i) != NULL && XEXP (orig, i) != may_share)
2443 XEXP (copy, i) = copy_most_rtx (XEXP (orig, i), may_share);
2444 break;
2446 case 'u':
2447 XEXP (copy, i) = XEXP (orig, i);
2448 break;
2450 case 'E':
2451 case 'V':
2452 XVEC (copy, i) = XVEC (orig, i);
2453 if (XVEC (orig, i) != NULL)
2455 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
2456 for (j = 0; j < XVECLEN (copy, i); j++)
2457 XVECEXP (copy, i, j)
2458 = copy_most_rtx (XVECEXP (orig, i, j), may_share);
2460 break;
2462 case 'w':
2463 XWINT (copy, i) = XWINT (orig, i);
2464 break;
2466 case 'n':
2467 case 'i':
2468 XINT (copy, i) = XINT (orig, i);
2469 break;
2471 case 't':
2472 XTREE (copy, i) = XTREE (orig, i);
2473 break;
2475 case 's':
2476 case 'S':
2477 XSTR (copy, i) = XSTR (orig, i);
2478 break;
2480 case '0':
2481 /* Copy this through the wide int field; that's safest. */
2482 X0WINT (copy, i) = X0WINT (orig, i);
2483 break;
2485 default:
2486 abort ();
2489 return copy;
2492 /* Mark ORIG as in use, and return a copy of it if it was already in use.
2493 Recursively does the same for subexpressions. */
2496 copy_rtx_if_shared (orig)
2497 rtx orig;
2499 rtx x = orig;
2500 int i;
2501 enum rtx_code code;
2502 const char *format_ptr;
2503 int copied = 0;
2505 if (x == 0)
2506 return 0;
2508 code = GET_CODE (x);
2510 /* These types may be freely shared. */
2512 switch (code)
2514 case REG:
2515 case QUEUED:
2516 case CONST_INT:
2517 case CONST_DOUBLE:
2518 case CONST_VECTOR:
2519 case SYMBOL_REF:
2520 case CODE_LABEL:
2521 case PC:
2522 case CC0:
2523 case SCRATCH:
2524 /* SCRATCH must be shared because each SCRATCH represents a distinct value. */
2525 return x;
2527 case CONST:
2528 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
2529 a LABEL_REF, it isn't sharable. */
2530 if (GET_CODE (XEXP (x, 0)) == PLUS
2531 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
2532 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
2533 return x;
2534 break;
2536 case INSN:
2537 case JUMP_INSN:
2538 case CALL_INSN:
2539 case NOTE:
2540 case BARRIER:
2541 /* The chain of insns is not being copied. */
2542 return x;
2544 case MEM:
2545 /* A MEM is allowed to be shared if its address is constant.
2547 We used to allow sharing of MEMs which referenced
2548 virtual_stack_vars_rtx or virtual_incoming_args_rtx, but
2549 that can lose. instantiate_virtual_regs will not unshare
2550 the MEMs, and combine may change the structure of the address
2551 because it looks safe and profitable in one context, but
2552 in some other context it creates unrecognizable RTL. */
2553 if (CONSTANT_ADDRESS_P (XEXP (x, 0)))
2554 return x;
2556 break;
2558 default:
2559 break;
2562 /* This rtx may not be shared. If it has already been seen,
2563 replace it with a copy of itself. */
2565 if (RTX_FLAG (x, used))
2567 rtx copy;
2569 copy = rtx_alloc (code);
2570 memcpy (copy, x,
2571 (sizeof (*copy) - sizeof (copy->fld)
2572 + sizeof (copy->fld[0]) * GET_RTX_LENGTH (code)));
2573 x = copy;
2574 copied = 1;
2576 RTX_FLAG (x, used) = 1;
2578 /* Now scan the subexpressions recursively.
2579 We can store any replaced subexpressions directly into X
2580 since we know X is not shared! Any vectors in X
2581 must be copied if X was copied. */
2583 format_ptr = GET_RTX_FORMAT (code);
2585 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2587 switch (*format_ptr++)
2589 case 'e':
2590 XEXP (x, i) = copy_rtx_if_shared (XEXP (x, i));
2591 break;
2593 case 'E':
2594 if (XVEC (x, i) != NULL)
2596 int j;
2597 int len = XVECLEN (x, i);
2599 if (copied && len > 0)
2600 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
2601 for (j = 0; j < len; j++)
2602 XVECEXP (x, i, j) = copy_rtx_if_shared (XVECEXP (x, i, j));
2604 break;
2607 return x;
2610 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
2611 to look for shared sub-parts. */
2613 void
2614 reset_used_flags (x)
2615 rtx x;
2617 int i, j;
2618 enum rtx_code code;
2619 const char *format_ptr;
2621 if (x == 0)
2622 return;
2624 code = GET_CODE (x);
2626 /* These types may be freely shared so we needn't do any resetting
2627 for them. */
2629 switch (code)
2631 case REG:
2632 case QUEUED:
2633 case CONST_INT:
2634 case CONST_DOUBLE:
2635 case CONST_VECTOR:
2636 case SYMBOL_REF:
2637 case CODE_LABEL:
2638 case PC:
2639 case CC0:
2640 return;
2642 case INSN:
2643 case JUMP_INSN:
2644 case CALL_INSN:
2645 case NOTE:
2646 case LABEL_REF:
2647 case BARRIER:
2648 /* The chain of insns is not being copied. */
2649 return;
2651 default:
2652 break;
2655 RTX_FLAG (x, used) = 0;
2657 format_ptr = GET_RTX_FORMAT (code);
2658 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2660 switch (*format_ptr++)
2662 case 'e':
2663 reset_used_flags (XEXP (x, i));
2664 break;
2666 case 'E':
2667 for (j = 0; j < XVECLEN (x, i); j++)
2668 reset_used_flags (XVECEXP (x, i, j));
2669 break;
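/* The used bits make unsharing a two-pass protocol, sketched here
   for a single insn:

     reset_used_flags (PATTERN (insn));
     PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));

   The first pass clears the marks; the second marks each
   subexpression as it is seen and copies any that turn up a second
   time. unshare_all_rtl_again above applies exactly this protocol
   to the whole insn chain. */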
2674 /* Copy X if necessary so that it won't be altered by changes in OTHER.
2675 Return X or the rtx for the pseudo reg the value of X was copied into.
2676 OTHER must be valid as a SET_DEST. */
2679 make_safe_from (x, other)
2680 rtx x, other;
2682 while (1)
2683 switch (GET_CODE (other))
2685 case SUBREG:
2686 other = SUBREG_REG (other);
2687 break;
2688 case STRICT_LOW_PART:
2689 case SIGN_EXTEND:
2690 case ZERO_EXTEND:
2691 other = XEXP (other, 0);
2692 break;
2693 default:
2694 goto done;
2696 done:
2697 if ((GET_CODE (other) == MEM
2698 && ! CONSTANT_P (x)
2699 && GET_CODE (x) != REG
2700 && GET_CODE (x) != SUBREG)
2701 || (GET_CODE (other) == REG
2702 && (REGNO (other) < FIRST_PSEUDO_REGISTER
2703 || reg_mentioned_p (other, x))))
2705 rtx temp = gen_reg_rtx (GET_MODE (x));
2706 emit_move_insn (temp, x);
2707 return temp;
2709 return x;
2712 /* Emission of insns (adding them to the doubly-linked list). */
2714 /* Return the first insn of the current sequence or current function. */
2717 get_insns ()
2719 return first_insn;
2722 /* Specify a new insn as the first in the chain. */
2724 void
2725 set_first_insn (insn)
2726 rtx insn;
2728 if (PREV_INSN (insn) != 0)
2729 abort ();
2730 first_insn = insn;
2733 /* Return the last insn emitted in current sequence or current function. */
2736 get_last_insn ()
2738 return last_insn;
2741 /* Specify a new insn as the last in the chain. */
2743 void
2744 set_last_insn (insn)
2745 rtx insn;
2747 if (NEXT_INSN (insn) != 0)
2748 abort ();
2749 last_insn = insn;
2752 /* Return the last insn emitted, even if it is in a sequence now pushed. */
2755 get_last_insn_anywhere ()
2757 struct sequence_stack *stack;
2758 if (last_insn)
2759 return last_insn;
2760 for (stack = seq_stack; stack; stack = stack->next)
2761 if (stack->last != 0)
2762 return stack->last;
2763 return 0;
2766 /* Return a number larger than any instruction's uid in this function. */
2769 get_max_uid ()
2771 return cur_insn_uid;
2774 /* Renumber instructions so that no instruction UIDs are wasted. */
2776 void
2777 renumber_insns (stream)
2778 FILE *stream;
2780 rtx insn;
2782 /* If we're not supposed to renumber instructions, don't. */
2783 if (!flag_renumber_insns)
2784 return;
2786 /* If there aren't that many instructions, then it's not really
2787 worth renumbering them. */
2788 if (flag_renumber_insns == 1 && get_max_uid () < 25000)
2789 return;
2791 cur_insn_uid = 1;
2793 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2795 if (stream)
2796 fprintf (stream, "Renumbering insn %d to %d\n",
2797 INSN_UID (insn), cur_insn_uid);
2798 INSN_UID (insn) = cur_insn_uid++;
2802 /* Return the next insn. If it is a SEQUENCE, return the first insn
2803 of the sequence. */
2806 next_insn (insn)
2807 rtx insn;
2809 if (insn)
2811 insn = NEXT_INSN (insn);
2812 if (insn && GET_CODE (insn) == INSN
2813 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2814 insn = XVECEXP (PATTERN (insn), 0, 0);
2817 return insn;
2820 /* Return the previous insn. If it is a SEQUENCE, return the last insn
2821 of the sequence. */
2824 previous_insn (insn)
2825 rtx insn;
2827 if (insn)
2829 insn = PREV_INSN (insn);
2830 if (insn && GET_CODE (insn) == INSN
2831 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2832 insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
2835 return insn;
2838 /* Return the next insn after INSN that is not a NOTE. This routine does not
2839 look inside SEQUENCEs. */
2842 next_nonnote_insn (insn)
2843 rtx insn;
2845 while (insn)
2847 insn = NEXT_INSN (insn);
2848 if (insn == 0 || GET_CODE (insn) != NOTE)
2849 break;
2852 return insn;
2855 /* Return the previous insn before INSN that is not a NOTE. This routine does
2856 not look inside SEQUENCEs. */
2859 prev_nonnote_insn (insn)
2860 rtx insn;
2862 while (insn)
2864 insn = PREV_INSN (insn);
2865 if (insn == 0 || GET_CODE (insn) != NOTE)
2866 break;
2869 return insn;
2872 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
2873 or 0, if there is none. This routine does not look inside
2874 SEQUENCEs. */
2877 next_real_insn (insn)
2878 rtx insn;
2880 while (insn)
2882 insn = NEXT_INSN (insn);
2883 if (insn == 0 || GET_CODE (insn) == INSN
2884 || GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == JUMP_INSN)
2885 break;
2888 return insn;
2891 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
2892 or 0, if there is none. This routine does not look inside
2893 SEQUENCEs. */
2896 prev_real_insn (insn)
2897 rtx insn;
2899 while (insn)
2901 insn = PREV_INSN (insn);
2902 if (insn == 0 || GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
2903 || GET_CODE (insn) == JUMP_INSN)
2904 break;
2907 return insn;
2910 /* Find the next insn after INSN that really does something. This routine
2911 does not look inside SEQUENCEs. Until reload has completed, this is the
2912 same as next_real_insn. */
2915 active_insn_p (insn)
2916 rtx insn;
2918 return (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == JUMP_INSN
2919 || (GET_CODE (insn) == INSN
2920 && (! reload_completed
2921 || (GET_CODE (PATTERN (insn)) != USE
2922 && GET_CODE (PATTERN (insn)) != CLOBBER))));
2926 next_active_insn (insn)
2927 rtx insn;
2929 while (insn)
2931 insn = NEXT_INSN (insn);
2932 if (insn == 0 || active_insn_p (insn))
2933 break;
2936 return insn;
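/* These walkers serve as loop steppers; e.g. visiting every active
   insn after START might look like this sketch (START and the
   hypothetical handle_insn are illustrative):

     rtx insn;
     for (insn = next_active_insn (start); insn;
          insn = next_active_insn (insn))
       handle_insn (insn);

   where handle_insn stands for whatever processing the pass does. */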
2939 /* Find the last insn before INSN that really does something. This routine
2940 does not look inside SEQUENCEs. Until reload has completed, this is the
2941 same as prev_real_insn. */
2944 prev_active_insn (insn)
2945 rtx insn;
2947 while (insn)
2949 insn = PREV_INSN (insn);
2950 if (insn == 0 || active_insn_p (insn))
2951 break;
2954 return insn;
2957 /* Return the next CODE_LABEL after the insn INSN, or 0 if there is none. */
2960 next_label (insn)
2961 rtx insn;
2963 while (insn)
2965 insn = NEXT_INSN (insn);
2966 if (insn == 0 || GET_CODE (insn) == CODE_LABEL)
2967 break;
2970 return insn;
2973 /* Return the last CODE_LABEL before the insn INSN, or 0 if there is none. */
2976 prev_label (insn)
2977 rtx insn;
2979 while (insn)
2981 insn = PREV_INSN (insn);
2982 if (insn == 0 || GET_CODE (insn) == CODE_LABEL)
2983 break;
2986 return insn;
2989 #ifdef HAVE_cc0
2990 /* INSN uses CC0 and is being moved into a delay slot. Set up REG_CC_SETTER
2991 and REG_CC_USER notes so we can find it. */
2993 void
2994 link_cc0_insns (insn)
2995 rtx insn;
2997 rtx user = next_nonnote_insn (insn);
2999 if (GET_CODE (user) == INSN && GET_CODE (PATTERN (user)) == SEQUENCE)
3000 user = XVECEXP (PATTERN (user), 0, 0);
3002 REG_NOTES (user) = gen_rtx_INSN_LIST (REG_CC_SETTER, insn,
3003 REG_NOTES (user));
3004 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_CC_USER, user, REG_NOTES (insn));
3007 /* Return the next insn that uses CC0 after INSN, which is assumed to
3008 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3009 applied to the result of this function should yield INSN).
3011 Normally, this is simply the next insn. However, if a REG_CC_USER note
3012 is present, it contains the insn that uses CC0.
3014 Return 0 if we can't find the insn. */
3017 next_cc0_user (insn)
3018 rtx insn;
3020 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3022 if (note)
3023 return XEXP (note, 0);
3025 insn = next_nonnote_insn (insn);
3026 if (insn && GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
3027 insn = XVECEXP (PATTERN (insn), 0, 0);
3029 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3030 return insn;
3032 return 0;
3035 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3036 note, it is the previous insn. */
3039 prev_cc0_setter (insn)
3040 rtx insn;
3042 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3044 if (note)
3045 return XEXP (note, 0);
3047 insn = prev_nonnote_insn (insn);
3048 if (! sets_cc0_p (PATTERN (insn)))
3049 abort ();
3051 return insn;
3053 #endif
3055 /* Increment the label uses for all labels present in X. */
3057 static void
3058 mark_label_nuses (x)
3059 rtx x;
3061 enum rtx_code code;
3062 int i, j;
3063 const char *fmt;
3065 code = GET_CODE (x);
3066 if (code == LABEL_REF)
3067 LABEL_NUSES (XEXP (x, 0))++;
3069 fmt = GET_RTX_FORMAT (code);
3070 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3072 if (fmt[i] == 'e')
3073 mark_label_nuses (XEXP (x, i));
3074 else if (fmt[i] == 'E')
3075 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3076 mark_label_nuses (XVECEXP (x, i, j));
3081 /* Try splitting insns that can be split for better scheduling.
3082 PAT is the pattern which might split.
3083 TRIAL is the insn providing PAT.
3084 LAST is non-zero if we should return the last insn of the sequence produced.
3086 If this routine succeeds in splitting, it returns the first or last
3087 replacement insn depending on the value of LAST. Otherwise, it
3088 returns TRIAL. If the insn to be returned can be split, it will be. */
3091 try_split (pat, trial, last)
3092 rtx pat, trial;
3093 int last;
3095 rtx before = PREV_INSN (trial);
3096 rtx after = NEXT_INSN (trial);
3097 int has_barrier = 0;
3098 rtx tem;
3099 rtx note, seq;
3100 int probability;
3102 if (any_condjump_p (trial)
3103 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3104 split_branch_probability = INTVAL (XEXP (note, 0));
3105 probability = split_branch_probability;
3107 seq = split_insns (pat, trial);
3109 split_branch_probability = -1;
3111 /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
3112 We may need to handle this specially. */
3113 if (after && GET_CODE (after) == BARRIER)
3115 has_barrier = 1;
3116 after = NEXT_INSN (after);
3119 if (seq)
3121 /* SEQ can either be a SEQUENCE or the pattern of a single insn.
3122 The latter case will normally arise only when the splitting is done
3123 so that the result, in turn, will be split again (SFmode on the 29k is an example). */
3124 if (GET_CODE (seq) == SEQUENCE)
3126 int i, njumps = 0;
3128 /* Avoid infinite loop if any insn of the result matches
3129 the original pattern. */
3130 for (i = 0; i < XVECLEN (seq, 0); i++)
3131 if (GET_CODE (XVECEXP (seq, 0, i)) == INSN
3132 && rtx_equal_p (PATTERN (XVECEXP (seq, 0, i)), pat))
3133 return trial;
3135 /* Mark labels. */
3136 for (i = XVECLEN (seq, 0) - 1; i >= 0; i--)
3137 if (GET_CODE (XVECEXP (seq, 0, i)) == JUMP_INSN)
3139 rtx insn = XVECEXP (seq, 0, i);
3140 mark_jump_label (PATTERN (insn),
3141 XVECEXP (seq, 0, i), 0);
3142 njumps++;
3143 if (probability != -1
3144 && any_condjump_p (insn)
3145 && !find_reg_note (insn, REG_BR_PROB, 0))
3147 /* We can preserve the REG_BR_PROB notes only if exactly
3148 one jump is created, otherwise the machine description
3149 is responsible for this step using
3150 split_branch_probability variable. */
3151 if (njumps != 1)
3152 abort ();
3153 REG_NOTES (insn)
3154 = gen_rtx_EXPR_LIST (REG_BR_PROB,
3155 GEN_INT (probability),
3156 REG_NOTES (insn));
3160 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3161 in SEQ and copy our CALL_INSN_FUNCTION_USAGE to it. */
3162 if (GET_CODE (trial) == CALL_INSN)
3163 for (i = XVECLEN (seq, 0) - 1; i >= 0; i--)
3164 if (GET_CODE (XVECEXP (seq, 0, i)) == CALL_INSN)
3165 CALL_INSN_FUNCTION_USAGE (XVECEXP (seq, 0, i))
3166 = CALL_INSN_FUNCTION_USAGE (trial);
3168 /* Copy notes, particularly those related to the CFG. */
3169 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3171 switch (REG_NOTE_KIND (note))
3173 case REG_EH_REGION:
3174 for (i = XVECLEN (seq, 0) - 1; i >= 0; i--)
3176 rtx insn = XVECEXP (seq, 0, i);
3177 if (GET_CODE (insn) == CALL_INSN
3178 || (flag_non_call_exceptions
3179 && may_trap_p (PATTERN (insn))))
3180 REG_NOTES (insn)
3181 = gen_rtx_EXPR_LIST (REG_EH_REGION,
3182 XEXP (note, 0),
3183 REG_NOTES (insn));
3185 break;
3187 case REG_NORETURN:
3188 case REG_SETJMP:
3189 case REG_ALWAYS_RETURN:
3190 for (i = XVECLEN (seq, 0) - 1; i >= 0; i--)
3192 rtx insn = XVECEXP (seq, 0, i);
3193 if (GET_CODE (insn) == CALL_INSN)
3194 REG_NOTES (insn)
3195 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3196 XEXP (note, 0),
3197 REG_NOTES (insn));
3199 break;
3201 case REG_NON_LOCAL_GOTO:
3202 for (i = XVECLEN (seq, 0) - 1; i >= 0; i--)
3204 rtx insn = XVECEXP (seq, 0, i);
3205 if (GET_CODE (insn) == JUMP_INSN)
3206 REG_NOTES (insn)
3207 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3208 XEXP (note, 0),
3209 REG_NOTES (insn));
3211 break;
3213 default:
3214 break;
3218 /* If there are LABELs inside the split insns, increment the
3219 usage counts so we don't delete the labels. */
3220 if (GET_CODE (trial) == INSN)
3221 for (i = XVECLEN (seq, 0) - 1; i >= 0; i--)
3222 if (GET_CODE (XVECEXP (seq, 0, i)) == INSN)
3223 mark_label_nuses (PATTERN (XVECEXP (seq, 0, i)));
3225 tem = emit_insn_after (seq, trial);
3227 delete_insn (trial);
3228 if (has_barrier)
3229 emit_barrier_after (tem);
3231 /* Recursively call try_split for each new insn created; by the
3232 time control returns here that insn will be fully split, so
3233 set LAST and continue from the insn after the one returned.
3234 We can't use next_active_insn here since AFTER may be a note.
3235 Ignore deleted insns, which can occur if not optimizing. */
3236 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3237 if (! INSN_DELETED_P (tem) && INSN_P (tem))
3238 tem = try_split (PATTERN (tem), tem, 1);
3240 /* Avoid infinite loop if the result matches the original pattern. */
3241 else if (rtx_equal_p (seq, pat))
3242 return trial;
3243 else
3245 PATTERN (trial) = seq;
3246 INSN_CODE (trial) = -1;
3247 try_split (seq, trial, last);
3250 /* Return either the first or the last insn, depending on which was
3251 requested. */
3252 return last
3253 ? (after ? PREV_INSN (after) : last_insn)
3254 : NEXT_INSN (before);
3257 return trial;
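/* A pass that wants an insn split in place, keeping its position in
   the chain, can simply write (this mirrors the recursive call
   above):

     insn = try_split (PATTERN (insn), insn, 1);

   If the machine description provides no applicable split, INSN
   comes back unchanged. */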
3260 /* Make and return an INSN rtx, initializing all its slots.
3261 Store PATTERN in the pattern slots. */
3264 make_insn_raw (pattern)
3265 rtx pattern;
3267 rtx insn;
3269 insn = rtx_alloc (INSN);
3271 INSN_UID (insn) = cur_insn_uid++;
3272 PATTERN (insn) = pattern;
3273 INSN_CODE (insn) = -1;
3274 LOG_LINKS (insn) = NULL;
3275 REG_NOTES (insn) = NULL;
3277 #ifdef ENABLE_RTL_CHECKING
3278 if (insn
3279 && INSN_P (insn)
3280 && (returnjump_p (insn)
3281 || (GET_CODE (insn) == SET
3282 && SET_DEST (insn) == pc_rtx)))
3284 warning ("ICE: emit_insn used where emit_jump_insn needed:\n");
3285 debug_rtx (insn);
3287 #endif
3289 return insn;
3292 /* Like `make_insn' but make a JUMP_INSN instead of an insn. */
3294 static rtx
3295 make_jump_insn_raw (pattern)
3296 rtx pattern;
3298 rtx insn;
3300 insn = rtx_alloc (JUMP_INSN);
3301 INSN_UID (insn) = cur_insn_uid++;
3303 PATTERN (insn) = pattern;
3304 INSN_CODE (insn) = -1;
3305 LOG_LINKS (insn) = NULL;
3306 REG_NOTES (insn) = NULL;
3307 JUMP_LABEL (insn) = NULL;
3309 return insn;
3312 /* Like `make_insn' but make a CALL_INSN instead of an insn. */
3314 static rtx
3315 make_call_insn_raw (pattern)
3316 rtx pattern;
3318 rtx insn;
3320 insn = rtx_alloc (CALL_INSN);
3321 INSN_UID (insn) = cur_insn_uid++;
3323 PATTERN (insn) = pattern;
3324 INSN_CODE (insn) = -1;
3325 LOG_LINKS (insn) = NULL;
3326 REG_NOTES (insn) = NULL;
3327 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
3329 return insn;
3332 /* Add INSN to the end of the doubly-linked list.
3333 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
3335 void
3336 add_insn (insn)
3337 rtx insn;
3339 PREV_INSN (insn) = last_insn;
3340 NEXT_INSN (insn) = 0;
3342 if (NULL != last_insn)
3343 NEXT_INSN (last_insn) = insn;
3345 if (NULL == first_insn)
3346 first_insn = insn;
3348 last_insn = insn;
3351 /* Add INSN into the doubly-linked list after insn AFTER. This and
3352 the next should be the only functions called to insert an insn once
3353 delay slots have been filled since only they know how to update a
3354 SEQUENCE. */
3356 void
3357 add_insn_after (insn, after)
3358 rtx insn, after;
3360 rtx next = NEXT_INSN (after);
3361 basic_block bb;
3363 if (optimize && INSN_DELETED_P (after))
3364 abort ();
3366 NEXT_INSN (insn) = next;
3367 PREV_INSN (insn) = after;
3369 if (next)
3371 PREV_INSN (next) = insn;
3372 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
3373 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
3375 else if (last_insn == after)
3376 last_insn = insn;
3377 else
3379 struct sequence_stack *stack = seq_stack;
3380 /* Scan all pending sequences too. */
3381 for (; stack; stack = stack->next)
3382 if (after == stack->last)
3384 stack->last = insn;
3385 break;
3388 if (stack == 0)
3389 abort ();
3392 if (basic_block_for_insn
3393 && (unsigned int) INSN_UID (after) < basic_block_for_insn->num_elements
3394 && (bb = BLOCK_FOR_INSN (after)))
3396 set_block_for_insn (insn, bb);
3397 if (INSN_P (insn))
3398 bb->flags |= BB_DIRTY;
3399 /* This should not happen, as the first insn in the BB is always
3400 either a NOTE or a LABEL. */
3401 if (bb->end == after
3402 /* Avoid clobbering of structure when creating new BB. */
3403 && GET_CODE (insn) != BARRIER
3404 && (GET_CODE (insn) != NOTE
3405 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
3406 bb->end = insn;
3409 NEXT_INSN (after) = insn;
3410 if (GET_CODE (after) == INSN && GET_CODE (PATTERN (after)) == SEQUENCE)
3412 rtx sequence = PATTERN (after);
3413 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3417 /* Add INSN into the doubly-linked list before insn BEFORE. This and
3418 the previous should be the only functions called to insert an insn once
3419 delay slots have been filled since only they know how to update a
3420 SEQUENCE. */
3422 void
3423 add_insn_before (insn, before)
3424 rtx insn, before;
3426 rtx prev = PREV_INSN (before);
3427 basic_block bb;
3429 if (optimize && INSN_DELETED_P (before))
3430 abort ();
3432 PREV_INSN (insn) = prev;
3433 NEXT_INSN (insn) = before;
3435 if (prev)
3437 NEXT_INSN (prev) = insn;
3438 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
3440 rtx sequence = PATTERN (prev);
3441 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3444 else if (first_insn == before)
3445 first_insn = insn;
3446 else
3448 struct sequence_stack *stack = seq_stack;
3449 /* Scan all pending sequences too. */
3450 for (; stack; stack = stack->next)
3451 if (before == stack->first)
3453 stack->first = insn;
3454 break;
3457 if (stack == 0)
3458 abort ();
3461 if (basic_block_for_insn
3462 && (unsigned int) INSN_UID (before) < basic_block_for_insn->num_elements
3463 && (bb = BLOCK_FOR_INSN (before)))
3465 set_block_for_insn (insn, bb);
3466 if (INSN_P (insn))
3467 bb->flags |= BB_DIRTY;
3468 /* This should not happen, as the first insn in the BB is always
3469 either a NOTE or a LABEL. */
3470 if (bb->head == insn
3471 /* Avoid clobbering of structure when creating new BB. */
3472 && GET_CODE (insn) != BARRIER
3473 && (GET_CODE (insn) != NOTE
3474 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
3475 abort ();
3478 PREV_INSN (before) = insn;
3479 if (GET_CODE (before) == INSN && GET_CODE (PATTERN (before)) == SEQUENCE)
3480 PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
3483 /* Remove an insn from its doubly-linked list. This function knows how
3484 to handle sequences. */
3485 void
3486 remove_insn (insn)
3487 rtx insn;
3489 rtx next = NEXT_INSN (insn);
3490 rtx prev = PREV_INSN (insn);
3491 basic_block bb;
3493 if (prev)
3495 NEXT_INSN (prev) = next;
3496 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
3498 rtx sequence = PATTERN (prev);
3499 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
3502 else if (first_insn == insn)
3503 first_insn = next;
3504 else
3506 struct sequence_stack *stack = seq_stack;
3507 /* Scan all pending sequences too. */
3508 for (; stack; stack = stack->next)
3509 if (insn == stack->first)
3511 stack->first = next;
3512 break;
3515 if (stack == 0)
3516 abort ();
3519 if (next)
3521 PREV_INSN (next) = prev;
3522 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
3523 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
3525 else if (last_insn == insn)
3526 last_insn = prev;
3527 else
3529 struct sequence_stack *stack = seq_stack;
3530 /* Scan all pending sequences too. */
3531 for (; stack; stack = stack->next)
3532 if (insn == stack->last)
3534 stack->last = prev;
3535 break;
3538 if (stack == 0)
3539 abort ();
3541 if (basic_block_for_insn
3542 && (unsigned int) INSN_UID (insn) < basic_block_for_insn->num_elements
3543 && (bb = BLOCK_FOR_INSN (insn)))
3545 if (INSN_P (insn))
3546 bb->flags |= BB_DIRTY;
3547 if (bb->head == insn)
3549 /* Never ever delete the basic block note without deleting whole
3550 basic block. */
3551 if (GET_CODE (insn) == NOTE)
3552 abort ();
3553 bb->head = next;
3555 if (bb->end == insn)
3556 bb->end = prev;
3560 /* Delete all insns made since FROM.
3561 FROM becomes the new last instruction. */
3563 void
3564 delete_insns_since (from)
3565 rtx from;
3567 if (from == 0)
3568 first_insn = 0;
3569 else
3570 NEXT_INSN (from) = 0;
3571 last_insn = from;
3574 /* This function is deprecated; please use sequences instead.
3576 Move a consecutive bunch of insns to a different place in the chain.
3577 The insns to be moved are those between FROM and TO.
3578 They are moved to a new position after the insn AFTER.
3579 AFTER must not be FROM or TO or any insn in between.
3581 This function does not know about SEQUENCEs and hence should not be
3582 called after delay-slot filling has been done. */
3584 void
3585 reorder_insns_nobb (from, to, after)
3586 rtx from, to, after;
3588 /* Splice this bunch out of where it is now. */
3589 if (PREV_INSN (from))
3590 NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
3591 if (NEXT_INSN (to))
3592 PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
3593 if (last_insn == to)
3594 last_insn = PREV_INSN (from);
3595 if (first_insn == from)
3596 first_insn = NEXT_INSN (to);
3598 /* Make the new neighbors point to it and it to them. */
3599 if (NEXT_INSN (after))
3600 PREV_INSN (NEXT_INSN (after)) = to;
3602 NEXT_INSN (to) = NEXT_INSN (after);
3603 PREV_INSN (from) = after;
3604 NEXT_INSN (after) = from;
3605 if (after == last_insn)
3606 last_insn = to;
3609 /* Same as function above, but take care to update BB boundaries. */
3610 void
3611 reorder_insns (from, to, after)
3612 rtx from, to, after;
3614 rtx prev = PREV_INSN (from);
3615 basic_block bb, bb2;
3617 reorder_insns_nobb (from, to, after);
3619 if (basic_block_for_insn
3620 && (unsigned int) INSN_UID (after) < basic_block_for_insn->num_elements
3621 && (bb = BLOCK_FOR_INSN (after)))
3623 rtx x;
3624 bb->flags |= BB_DIRTY;
3626 if (basic_block_for_insn
3627 && ((unsigned int) INSN_UID (from)
3628 < basic_block_for_insn->num_elements)
3629 && (bb2 = BLOCK_FOR_INSN (from)))
3631 if (bb2->end == to)
3632 bb2->end = prev;
3633 bb2->flags |= BB_DIRTY;
3636 if (bb->end == after)
3637 bb->end = to;
3639 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
3640 set_block_for_insn (x, bb);
3644 /* Return the line note insn preceding INSN. */
3646 static rtx
3647 find_line_note (insn)
3648 rtx insn;
3650 if (no_line_numbers)
3651 return 0;
3653 for (; insn; insn = PREV_INSN (insn))
3654 if (GET_CODE (insn) == NOTE
3655 && NOTE_LINE_NUMBER (insn) >= 0)
3656 break;
3658 return insn;
3661 /* Like reorder_insns, but inserts line notes to preserve the line numbers
3662 of the moved insns when debugging. This may insert a note between AFTER
3663 and FROM, and another one after TO. */
3665 void
3666 reorder_insns_with_line_notes (from, to, after)
3667 rtx from, to, after;
3669 rtx from_line = find_line_note (from);
3670 rtx after_line = find_line_note (after);
3672 reorder_insns (from, to, after);
3674 if (from_line == after_line)
3675 return;
3677 if (from_line)
3678 emit_line_note_after (NOTE_SOURCE_FILE (from_line),
3679 NOTE_LINE_NUMBER (from_line),
3680 after);
3681 if (after_line)
3682 emit_line_note_after (NOTE_SOURCE_FILE (after_line),
3683 NOTE_LINE_NUMBER (after_line),
3684 to);
3687 /* Remove unnecessary notes from the instruction stream. */
3689 void
3690 remove_unnecessary_notes ()
3692 rtx block_stack = NULL_RTX;
3693 rtx eh_stack = NULL_RTX;
3694 rtx insn;
3695 rtx next;
3696 rtx tmp;
3698 /* We must not remove the first instruction in the function because
3699 the compiler depends on the first instruction being a note. */
3700 for (insn = NEXT_INSN (get_insns ()); insn; insn = next)
3702 /* Remember what's next. */
3703 next = NEXT_INSN (insn);
3705 /* We're only interested in notes. */
3706 if (GET_CODE (insn) != NOTE)
3707 continue;
3709 switch (NOTE_LINE_NUMBER (insn))
3711 case NOTE_INSN_DELETED:
3712 case NOTE_INSN_LOOP_END_TOP_COND:
3713 remove_insn (insn);
3714 break;
3716 case NOTE_INSN_EH_REGION_BEG:
3717 eh_stack = alloc_INSN_LIST (insn, eh_stack);
3718 break;
3720 case NOTE_INSN_EH_REGION_END:
3721 /* Too many end notes. */
3722 if (eh_stack == NULL_RTX)
3723 abort ();
3724 /* Mismatched nesting. */
3725 if (NOTE_EH_HANDLER (XEXP (eh_stack, 0)) != NOTE_EH_HANDLER (insn))
3726 abort ();
3727 tmp = eh_stack;
3728 eh_stack = XEXP (eh_stack, 1);
3729 free_INSN_LIST_node (tmp);
3730 break;
3732 case NOTE_INSN_BLOCK_BEG:
3733 /* By now, all notes indicating lexical blocks should have
3734 NOTE_BLOCK filled in. */
3735 if (NOTE_BLOCK (insn) == NULL_TREE)
3736 abort ();
3737 block_stack = alloc_INSN_LIST (insn, block_stack);
3738 break;
3740 case NOTE_INSN_BLOCK_END:
3741 /* Too many end notes. */
3742 if (block_stack == NULL_RTX)
3743 abort ();
3744 /* Mismatched nesting. */
3745 if (NOTE_BLOCK (XEXP (block_stack, 0)) != NOTE_BLOCK (insn))
3746 abort ();
3747 tmp = block_stack;
3748 block_stack = XEXP (block_stack, 1);
3749 free_INSN_LIST_node (tmp);
3751 /* Scan back to see if there are any non-note instructions
3752 between INSN and the beginning of this block. If not,
3753 then there is no PC range in the generated code that will
3754 actually be in this block, so there's no point in
3755 remembering the existence of the block. */
3756 for (tmp = PREV_INSN (insn); tmp; tmp = PREV_INSN (tmp))
3758 /* This block contains a real instruction. Note that we
3759 don't include labels; if the only thing in the block
3760 is a label, then there are still no PC values that
3761 lie within the block. */
3762 if (INSN_P (tmp))
3763 break;
3765 /* We're only interested in NOTEs. */
3766 if (GET_CODE (tmp) != NOTE)
3767 continue;
3769 if (NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BLOCK_BEG)
3771 /* We just verified that this BLOCK matches us with
3772 the block_stack check above. Never delete the
3773 BLOCK for the outermost scope of the function; we
3774 can refer to names from that scope even if the
3775 block notes are messed up. */
3776 if (! is_body_block (NOTE_BLOCK (insn))
3777 && (*debug_hooks->ignore_block) (NOTE_BLOCK (insn)))
3779 remove_insn (tmp);
3780 remove_insn (insn);
3782 break;
3784 else if (NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BLOCK_END)
3785 /* There's a nested block. We need to leave the
3786 current block in place since otherwise the debugger
3787 wouldn't be able to show symbols from our block in
3788 the nested block. */
3789 break;
3794 /* Too many begin notes. */
3795 if (block_stack || eh_stack)
3796 abort ();
3800 /* Emit an insn of given code and pattern
3801 at a specified place within the doubly-linked list. */
3803 /* Make an instruction with body PATTERN
3804 and output it before the instruction BEFORE. */
3807 emit_insn_before (pattern, before)
3808 rtx pattern, before;
3810 rtx insn = before;
3812 if (GET_CODE (pattern) == SEQUENCE)
3814 int i;
3816 for (i = 0; i < XVECLEN (pattern, 0); i++)
3818 insn = XVECEXP (pattern, 0, i);
3819 add_insn_before (insn, before);
3822 else
3824 insn = make_insn_raw (pattern);
3825 add_insn_before (insn, before);
3828 return insn;
3831 /* Make an instruction with body PATTERN and code JUMP_INSN
3832 and output it before the instruction BEFORE. */
3835 emit_jump_insn_before (pattern, before)
3836 rtx pattern, before;
3838 rtx insn;
3840 if (GET_CODE (pattern) == SEQUENCE)
3841 insn = emit_insn_before (pattern, before);
3842 else
3844 insn = make_jump_insn_raw (pattern);
3845 add_insn_before (insn, before);
3848 return insn;
3851 /* Make an instruction with body PATTERN and code CALL_INSN
3852 and output it before the instruction BEFORE. */
3855 emit_call_insn_before (pattern, before)
3856 rtx pattern, before;
3858 rtx insn;
3860 if (GET_CODE (pattern) == SEQUENCE)
3861 insn = emit_insn_before (pattern, before);
3862 else
3864 insn = make_call_insn_raw (pattern);
3865 add_insn_before (insn, before);
3866 PUT_CODE (insn, CALL_INSN);
3869 return insn;
3872 /* Make an instruction with body PATTERN and code CALL_INSN
3873 and output it after the instruction AFTER. */
3876 emit_call_insn_after (pattern, after)
3877 rtx pattern, after;
3879 rtx insn;
3881 if (GET_CODE (pattern) == SEQUENCE)
3882 insn = emit_insn_after (pattern, after);
3883 else
3885 insn = make_call_insn_raw (pattern);
3886 add_insn_after (insn, after);
3887 PUT_CODE (insn, CALL_INSN);
3890 return insn;
3893 /* Make an insn of code BARRIER
3894 and output it before the insn BEFORE. */
3897 emit_barrier_before (before)
3898 rtx before;
3900 rtx insn = rtx_alloc (BARRIER);
3902 INSN_UID (insn) = cur_insn_uid++;
3904 add_insn_before (insn, before);
3905 return insn;
3908 /* Emit the label LABEL before the insn BEFORE. */
3911 emit_label_before (label, before)
3912 rtx label, before;
3914 /* This can be called twice for the same label as a result of the
3915 confusion that follows a syntax error! So make it harmless. */
3916 if (INSN_UID (label) == 0)
3918 INSN_UID (label) = cur_insn_uid++;
3919 add_insn_before (label, before);
3922 return label;
3925 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
3928 emit_note_before (subtype, before)
3929 int subtype;
3930 rtx before;
3932 rtx note = rtx_alloc (NOTE);
3933 INSN_UID (note) = cur_insn_uid++;
3934 NOTE_SOURCE_FILE (note) = 0;
3935 NOTE_LINE_NUMBER (note) = subtype;
3937 add_insn_before (note, before);
3938 return note;
3941 /* Make an insn of code INSN with body PATTERN
3942 and output it after the insn AFTER. */
3945 emit_insn_after (pattern, after)
3946 rtx pattern, after;
3948 rtx insn = after;
3950 if (GET_CODE (pattern) == SEQUENCE)
3952 int i;
3954 for (i = 0; i < XVECLEN (pattern, 0); i++)
3956 insn = XVECEXP (pattern, 0, i);
3957 add_insn_after (insn, after);
3958 after = insn;
3961 else
3963 insn = make_insn_raw (pattern);
3964 add_insn_after (insn, after);
3967 return insn;
3970 /* Similar to emit_insn_after, except that line notes are to be inserted so
3971 as to act as if this insn were at FROM. */
3973 void
3974 emit_insn_after_with_line_notes (pattern, after, from)
3975 rtx pattern, after, from;
3977 rtx from_line = find_line_note (from);
3978 rtx after_line = find_line_note (after);
3979 rtx insn = emit_insn_after (pattern, after);
3981 if (from_line)
3982 emit_line_note_after (NOTE_SOURCE_FILE (from_line),
3983 NOTE_LINE_NUMBER (from_line),
3984 after);
3986 if (after_line)
3987 emit_line_note_after (NOTE_SOURCE_FILE (after_line),
3988 NOTE_LINE_NUMBER (after_line),
3989 insn);
3992 /* Make an insn of code JUMP_INSN with body PATTERN
3993 and output it after the insn AFTER. */
3996 emit_jump_insn_after (pattern, after)
3997 rtx pattern, after;
3999 rtx insn;
4001 if (GET_CODE (pattern) == SEQUENCE)
4002 insn = emit_insn_after (pattern, after);
4003 else
4005 insn = make_jump_insn_raw (pattern);
4006 add_insn_after (insn, after);
4009 return insn;
4012 /* Make an insn of code BARRIER
4013 and output it after the insn AFTER. */
4016 emit_barrier_after (after)
4017 rtx after;
4019 rtx insn = rtx_alloc (BARRIER);
4021 INSN_UID (insn) = cur_insn_uid++;
4023 add_insn_after (insn, after);
4024 return insn;
4027 /* Emit the label LABEL after the insn AFTER. */
4030 emit_label_after (label, after)
4031 rtx label, after;
4033 /* This can be called twice for the same label
4034 as a result of the confusion that follows a syntax error!
4035 So make it harmless. */
4036 if (INSN_UID (label) == 0)
4038 INSN_UID (label) = cur_insn_uid++;
4039 add_insn_after (label, after);
4042 return label;
4045 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
4048 emit_note_after (subtype, after)
4049 int subtype;
4050 rtx after;
4052 rtx note = rtx_alloc (NOTE);
4053 INSN_UID (note) = cur_insn_uid++;
4054 NOTE_SOURCE_FILE (note) = 0;
4055 NOTE_LINE_NUMBER (note) = subtype;
4056 add_insn_after (note, after);
4057 return note;
4060 /* Emit a line note for FILE and LINE after the insn AFTER. */
4063 emit_line_note_after (file, line, after)
4064 const char *file;
4065 int line;
4066 rtx after;
4068 rtx note;
4070 if (no_line_numbers && line > 0)
4072 cur_insn_uid++;
4073 return 0;
4076 note = rtx_alloc (NOTE);
4077 INSN_UID (note) = cur_insn_uid++;
4078 NOTE_SOURCE_FILE (note) = file;
4079 NOTE_LINE_NUMBER (note) = line;
4080 add_insn_after (note, after);
4081 return note;
4084 /* Make an insn of code INSN with pattern PATTERN
4085 and add it to the end of the doubly-linked list.
4086 If PATTERN is a SEQUENCE, take the elements of it
4087 and emit an insn for each element.
4089 Returns the last insn emitted. */
4092 emit_insn (pattern)
4093 rtx pattern;
4095 rtx insn = last_insn;
4097 if (GET_CODE (pattern) == SEQUENCE)
4099 int i;
4101 for (i = 0; i < XVECLEN (pattern, 0); i++)
4103 insn = XVECEXP (pattern, 0, i);
4104 add_insn (insn);
4107 else
4109 insn = make_insn_raw (pattern);
4110 add_insn (insn);
4113 return insn;
4116 /* Emit the insns in a chain starting with INSN.
4117 Return the last insn emitted. */
4120 emit_insns (insn)
4121 rtx insn;
4123 rtx last = 0;
4125 while (insn)
4127 rtx next = NEXT_INSN (insn);
4128 add_insn (insn);
4129 last = insn;
4130 insn = next;
4133 return last;
4136 /* Emit the insns in a chain starting with INSN and place them in front of
4137 the insn BEFORE. Return the last insn emitted. */
4140 emit_insns_before (insn, before)
4141 rtx insn;
4142 rtx before;
4144 rtx last = 0;
4146 while (insn)
4148 rtx next = NEXT_INSN (insn);
4149 add_insn_before (insn, before);
4150 last = insn;
4151 insn = next;
4154 return last;
4157 /* Emit the insns in a chain starting with FIRST and place them after
4158 the insn AFTER. Return the last insn emitted. */
4161 emit_insns_after (first, after)
4162 rtx first;
4163 rtx after;
4165 rtx last;
4166 rtx after_after;
4167 basic_block bb;
4169 if (!after)
4170 abort ();
4172 if (!first)
4173 return after;
4175 if (basic_block_for_insn
4176 && (unsigned int) INSN_UID (after) < basic_block_for_insn->num_elements
4177 && (bb = BLOCK_FOR_INSN (after)))
4179 bb->flags |= BB_DIRTY;
4180 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4181 set_block_for_insn (last, bb);
4182 set_block_for_insn (last, bb);
4183 if (bb->end == after)
4184 bb->end = last;
4186 else
4187 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4188 continue;
4190 after_after = NEXT_INSN (after);
4192 NEXT_INSN (after) = first;
4193 PREV_INSN (first) = after;
4194 NEXT_INSN (last) = after_after;
4195 if (after_after)
4196 PREV_INSN (after_after) = last;
4198 if (after == last_insn)
4199 last_insn = last;
4200 return last;
4203 /* Make an insn of code JUMP_INSN with pattern PATTERN
4204 and add it to the end of the doubly-linked list. */
4207 emit_jump_insn (pattern)
4208 rtx pattern;
4210 if (GET_CODE (pattern) == SEQUENCE)
4211 return emit_insn (pattern);
4212 else
4214 rtx insn = make_jump_insn_raw (pattern);
4215 add_insn (insn);
4216 return insn;
4220 /* Make an insn of code CALL_INSN with pattern PATTERN
4221 and add it to the end of the doubly-linked list. */
4224 emit_call_insn (pattern)
4225 rtx pattern;
4227 if (GET_CODE (pattern) == SEQUENCE)
4228 return emit_insn (pattern);
4229 else
4231 rtx insn = make_call_insn_raw (pattern);
4232 add_insn (insn);
4233 PUT_CODE (insn, CALL_INSN);
4234 return insn;
4238 /* Add the label LABEL to the end of the doubly-linked list. */
4241 emit_label (label)
4242 rtx label;
4244 /* This can be called twice for the same label
4245 as a result of the confusion that follows a syntax error!
4246 So make it harmless. */
4247 if (INSN_UID (label) == 0)
4249 INSN_UID (label) = cur_insn_uid++;
4250 add_insn (label);
4252 return label;
4255 /* Make an insn of code BARRIER
4256 and add it to the end of the doubly-linked list. */
4259 emit_barrier ()
4261 rtx barrier = rtx_alloc (BARRIER);
4262 INSN_UID (barrier) = cur_insn_uid++;
4263 add_insn (barrier);
4264 return barrier;
4267 /* Make an insn of code NOTE
4268 with data-fields specified by FILE and LINE
4269 and add it to the end of the doubly-linked list,
4270 but only if line-numbers are desired for debugging info. */
4273 emit_line_note (file, line)
4274 const char *file;
4275 int line;
4277 set_file_and_line_for_stmt (file, line);
4279 #if 0
4280 if (no_line_numbers)
4281 return 0;
4282 #endif
4284 return emit_note (file, line);
4287 /* Make an insn of code NOTE
4288 with data-fields specified by FILE and LINE
4289 and add it to the end of the doubly-linked list.
4290 If it is a line-number NOTE, omit it if it matches the previous one. */
4293 emit_note (file, line)
4294 const char *file;
4295 int line;
4297 rtx note;
4299 if (line > 0)
4301 if (file && last_filename && !strcmp (file, last_filename)
4302 && line == last_linenum)
4303 return 0;
4304 last_filename = file;
4305 last_linenum = line;
4308 if (no_line_numbers && line > 0)
4310 cur_insn_uid++;
4311 return 0;
4314 note = rtx_alloc (NOTE);
4315 INSN_UID (note) = cur_insn_uid++;
4316 NOTE_SOURCE_FILE (note) = file;
4317 NOTE_LINE_NUMBER (note) = line;
4318 add_insn (note);
4319 return note;
4322 /* Emit a line note, and don't omit it even if LINE matches the previous note. */
4325 emit_line_note_force (file, line)
4326 const char *file;
4327 int line;
4329 last_linenum = -1;
4330 return emit_line_note (file, line);
4333 /* Cause next statement to emit a line note even if the line number
4334 has not changed. This is used at the beginning of a function. */
4336 void
4337 force_next_line_note ()
4339 last_linenum = -1;
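/* Illustrative sketch only -- a hypothetical helper showing how
   force_next_line_note pairs with emit_line_note: clearing the
   duplicate check guarantees a note is emitted even when FILE and
   LINE match the previous note.  */

static rtx
example_note_function_start (file, line)
     const char *file;
     int line;
{
  force_next_line_note ();
  return emit_line_note (file, line);
}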
4342 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
4343 note of this type already exists, remove it first. */
4346 set_unique_reg_note (insn, kind, datum)
4347 rtx insn;
4348 enum reg_note kind;
4349 rtx datum;
4351 rtx note = find_reg_note (insn, kind, NULL_RTX);
4353 switch (kind)
4355 case REG_EQUAL:
4356 case REG_EQUIV:
4357 /* Don't add REG_EQUAL/REG_EQUIV notes if the insn
4358 has multiple sets (some callers assume single_set
4359 means the insn only has one set, when in fact it
4360 means the insn only has one *useful* set). */
4361 if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
4363 if (note)
4364 abort ();
4365 return NULL_RTX;
4368 /* Don't add ASM_OPERANDS REG_EQUAL/REG_EQUIV notes.
4369 They serve no useful purpose and break eliminate_regs. */
4370 if (GET_CODE (datum) == ASM_OPERANDS)
4371 return NULL_RTX;
4372 break;
4374 default:
4375 break;
4378 if (note)
4380 XEXP (note, 0) = datum;
4381 return note;
4384 REG_NOTES (insn) = gen_rtx_EXPR_LIST (kind, datum, REG_NOTES (insn));
4385 return REG_NOTES (insn);
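/* Illustrative sketch only -- a hypothetical helper: record that
   INSN's single set computes the constant VALUE.  Any existing
   REG_EQUAL note is replaced rather than duplicated.  */

static rtx
example_record_constant (insn, value)
     rtx insn;
     HOST_WIDE_INT value;
{
  return set_unique_reg_note (insn, REG_EQUAL, GEN_INT (value));
}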
4388 /* Return an indication of which type of insn should have X as a body.
4389 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
4391 enum rtx_code
4392 classify_insn (x)
4393 rtx x;
4395 if (GET_CODE (x) == CODE_LABEL)
4396 return CODE_LABEL;
4397 if (GET_CODE (x) == CALL)
4398 return CALL_INSN;
4399 if (GET_CODE (x) == RETURN)
4400 return JUMP_INSN;
4401 if (GET_CODE (x) == SET)
4403 if (SET_DEST (x) == pc_rtx)
4404 return JUMP_INSN;
4405 else if (GET_CODE (SET_SRC (x)) == CALL)
4406 return CALL_INSN;
4407 else
4408 return INSN;
4410 if (GET_CODE (x) == PARALLEL)
4412 int j;
4413 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
4414 if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
4415 return CALL_INSN;
4416 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
4417 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
4418 return JUMP_INSN;
4419 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
4420 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
4421 return CALL_INSN;
4423 return INSN;
4426 /* Emit the rtl pattern X as an appropriate kind of insn.
4427 If X is a label, it is simply added into the insn chain. */
4430 emit (x)
4431 rtx x;
4433 enum rtx_code code = classify_insn (x);
4435 if (code == CODE_LABEL)
4436 return emit_label (x);
4437 else if (code == INSN)
4438 return emit_insn (x);
4439 else if (code == JUMP_INSN)
4441 rtx insn = emit_jump_insn (x);
4442 if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
4443 return emit_barrier ();
4444 return insn;
4446 else if (code == CALL_INSN)
4447 return emit_call_insn (x);
4448 else
4449 abort ();
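/* Illustrative sketch only -- a hypothetical helper: emit a plain SET
   through the generic entry point above.  classify_insn reports INSN
   for a SET whose destination is not pc_rtx, so emit adds it as an
   ordinary insn.  */

static rtx
example_emit_set (dest, src)
     rtx dest, src;
{
  return emit (gen_rtx_SET (VOIDmode, dest, src));
}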
4452 /* Begin emitting insns to a sequence which can be packaged in an
4453 RTL_EXPR. If this sequence will contain something that might cause
4454 the compiler to pop arguments to function calls (because those
4455 pops have previously been deferred; see INHIBIT_DEFER_POP for more
4456 details), use do_pending_stack_adjust before calling this function.
4457 That will ensure that the deferred pops are not accidentally
4458 emitted in the middle of this sequence. */
4460 void
4461 start_sequence ()
4463 struct sequence_stack *tem;
4465 tem = (struct sequence_stack *) xmalloc (sizeof (struct sequence_stack));
4467 tem->next = seq_stack;
4468 tem->first = first_insn;
4469 tem->last = last_insn;
4470 tem->sequence_rtl_expr = seq_rtl_expr;
4472 seq_stack = tem;
4474 first_insn = 0;
4475 last_insn = 0;
4478 /* Similarly, but indicate that this sequence will be placed in T, an
4479 RTL_EXPR. See the documentation for start_sequence for more
4480 information about how to use this function. */
4482 void
4483 start_sequence_for_rtl_expr (t)
4484 tree t;
4486 start_sequence ();
4488 seq_rtl_expr = t;
4491 /* Set up the insn chain starting with FIRST as the current sequence,
4492 saving the previously current one. See the documentation for
4493 start_sequence for more information about how to use this function. */
4495 void
4496 push_to_sequence (first)
4497 rtx first;
4499 rtx last;
4501 start_sequence ();
4503 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last));
4505 first_insn = first;
4506 last_insn = last;
4509 /* Set up the insn chain from FIRST to LAST as the current sequence. */
4511 void
4512 push_to_full_sequence (first, last)
4513 rtx first, last;
4515 start_sequence ();
4516 first_insn = first;
4517 last_insn = last;
4518 /* We really should have the end of the insn chain here. */
4519 if (last && NEXT_INSN (last))
4520 abort ();
4523 /* Set up the outer-level insn chain
4524 as the current sequence, saving the previously current one. */
4526 void
4527 push_topmost_sequence ()
4529 struct sequence_stack *stack, *top = NULL;
4531 start_sequence ();
4533 for (stack = seq_stack; stack; stack = stack->next)
4534 top = stack;
4536 first_insn = top->first;
4537 last_insn = top->last;
4538 seq_rtl_expr = top->sequence_rtl_expr;
4541 /* After emitting to the outer-level insn chain, record the new state of
4542 that chain, and restore the previously saved state. */
4544 void
4545 pop_topmost_sequence ()
4547 struct sequence_stack *stack, *top = NULL;
4549 for (stack = seq_stack; stack; stack = stack->next)
4550 top = stack;
4552 top->first = first_insn;
4553 top->last = last_insn;
4554 /* ??? Why don't we save seq_rtl_expr here? */
4556 end_sequence ();
4559 /* After emitting to a sequence, restore previous saved state.
4561 To get the contents of the sequence just made, you must call
4562 `gen_sequence' *before* calling here.
4564 If the compiler might have deferred popping arguments while
4565 generating this sequence, and this sequence will not be immediately
4566 inserted into the instruction stream, use do_pending_stack_adjust
4567 before calling gen_sequence. That will ensure that the deferred
4568 pops are inserted into this sequence, and not into some random
4569 location in the instruction stream. See INHIBIT_DEFER_POP for more
4570 information about deferred popping of arguments. */
4572 void
4573 end_sequence ()
4575 struct sequence_stack *tem = seq_stack;
4577 first_insn = tem->first;
4578 last_insn = tem->last;
4579 seq_rtl_expr = tem->sequence_rtl_expr;
4580 seq_stack = tem->next;
4582 free (tem);
4585 /* This works like end_sequence, but records the old sequence in FIRST
4586 and LAST. */
4588 void
4589 end_full_sequence (first, last)
4590 rtx *first, *last;
4592 *first = first_insn;
4593 *last = last_insn;
4594 end_sequence ();
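/* Illustrative sketch only -- a hypothetical helper: temporarily make
   the chain *FIRST ... *LAST current, append the pattern PAT to it,
   and hand the possibly-updated endpoints back to the caller.  */

static void
example_append_to_chain (pat, first, last)
     rtx pat;
     rtx *first, *last;
{
  push_to_full_sequence (*first, *last);
  emit_insn (pat);
  end_full_sequence (first, last);
}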
4597 /* Return 1 if currently emitting into a sequence. */
4600 in_sequence_p ()
4602 return seq_stack != 0;
4605 /* Generate a SEQUENCE rtx containing the insns already emitted
4606 to the current sequence.
4608 This is how the gen_... function from a DEFINE_EXPAND
4609 constructs the SEQUENCE that it returns. */
4612 gen_sequence ()
4614 rtx result;
4615 rtx tem;
4616 int i;
4617 int len;
4619 /* Count the insns in the chain. */
4620 len = 0;
4621 for (tem = first_insn; tem; tem = NEXT_INSN (tem))
4622 len++;
4624 /* If there is only one insn, return its pattern rather than a SEQUENCE.
4625 (Now that we cache SEQUENCE expressions, it isn't worth special-casing
4626 the case of an empty list.)
4627 We only return the pattern of an insn if its code is INSN and it
4628 has no notes. This ensures that no information gets lost. */
4629 if (len == 1
4630 && GET_CODE (first_insn) == INSN
4631 && ! RTX_FRAME_RELATED_P (first_insn)
4632 /* Don't throw away any reg notes. */
4633 && REG_NOTES (first_insn) == 0)
4634 return PATTERN (first_insn);
4636 result = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (len));
4638 for (i = 0, tem = first_insn; tem; tem = NEXT_INSN (tem), i++)
4639 XVECEXP (result, 0, i) = tem;
4641 return result;
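/* Illustrative sketch only -- a hypothetical helper showing the
   canonical start_sequence / gen_sequence / end_sequence idiom: insns
   are emitted into a private chain, packaged (as a SEQUENCE, or as a
   bare pattern when only one plain insn was emitted), and finally
   emitted for real once the saved chain has been restored.  */

static rtx
example_expand_move (dest, src)
     rtx dest, src;
{
  rtx seq;

  start_sequence ();
  emit_insn (gen_rtx_SET (VOIDmode, dest, src));
  seq = gen_sequence ();
  end_sequence ();

  return emit_insn (seq);
}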
4644 /* Put the various virtual registers into REGNO_REG_RTX. */
4646 void
4647 init_virtual_regs (es)
4648 struct emit_status *es;
4650 rtx *ptr = es->x_regno_reg_rtx;
4651 ptr[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
4652 ptr[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
4653 ptr[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
4654 ptr[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
4655 ptr[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
4658 void
4659 clear_emit_caches ()
4661 int i;
4663 /* Clear the start_sequence/gen_sequence cache. */
4664 for (i = 0; i < SEQUENCE_RESULT_SIZE; i++)
4665 sequence_result[i] = 0;
4666 free_insn = 0;
4669 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
4670 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
4671 static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
4672 static int copy_insn_n_scratches;
4674 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
4675 copied an ASM_OPERANDS.
4676 In that case, it is the original input-operand vector. */
4677 static rtvec orig_asm_operands_vector;
4679 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
4680 copied an ASM_OPERANDS.
4681 In that case, it is the copied input-operand vector. */
4682 static rtvec copy_asm_operands_vector;
4684 /* Likewise for the constraints vector. */
4685 static rtvec orig_asm_constraints_vector;
4686 static rtvec copy_asm_constraints_vector;
4688 /* Recursively create a new copy of an rtx for copy_insn.
4689 This function differs from copy_rtx in that it handles SCRATCHes and
4690 ASM_OPERANDs properly.
4691 Normally, this function is not used directly; use copy_insn as front end.
4692 However, you could first copy an insn pattern with copy_insn and then use
4693 this function afterwards to properly copy any REG_NOTEs containing
4694 SCRATCHes. */
4697 copy_insn_1 (orig)
4698 rtx orig;
4700 rtx copy;
4701 int i, j;
4702 RTX_CODE code;
4703 const char *format_ptr;
4705 code = GET_CODE (orig);
4707 switch (code)
4709 case REG:
4710 case QUEUED:
4711 case CONST_INT:
4712 case CONST_DOUBLE:
4713 case CONST_VECTOR:
4714 case SYMBOL_REF:
4715 case CODE_LABEL:
4716 case PC:
4717 case CC0:
4718 case ADDRESSOF:
4719 return orig;
4721 case SCRATCH:
4722 for (i = 0; i < copy_insn_n_scratches; i++)
4723 if (copy_insn_scratch_in[i] == orig)
4724 return copy_insn_scratch_out[i];
4725 break;
4727 case CONST:
4728 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
4729 a LABEL_REF, it isn't sharable. */
4730 if (GET_CODE (XEXP (orig, 0)) == PLUS
4731 && GET_CODE (XEXP (XEXP (orig, 0), 0)) == SYMBOL_REF
4732 && GET_CODE (XEXP (XEXP (orig, 0), 1)) == CONST_INT)
4733 return orig;
4734 break;
4736 /* A MEM with a constant address is not sharable. The problem is that
4737 the constant address may need to be reloaded. If the mem is shared,
4738 then reloading one copy of this mem will cause all copies to appear
4739 to have been reloaded. */
4741 default:
4742 break;
4745 copy = rtx_alloc (code);
4747 /* Copy the various flags, and other information. We assume that
4748 all fields need copying, and then clear the fields that should
4749 not be copied. That is the sensible default behavior, and forces
4750 us to explicitly document why we are *not* copying a flag. */
4751 memcpy (copy, orig, sizeof (struct rtx_def) - sizeof (rtunion));
4753 /* We do not copy the USED flag, which is used as a mark bit during
4754 walks over the RTL. */
4755 RTX_FLAG (copy, used) = 0;
4757 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
4758 if (GET_RTX_CLASS (code) == 'i')
4760 RTX_FLAG (copy, jump) = 0;
4761 RTX_FLAG (copy, call) = 0;
4762 RTX_FLAG (copy, frame_related) = 0;
4765 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
4767 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
4769 copy->fld[i] = orig->fld[i];
4770 switch (*format_ptr++)
4772 case 'e':
4773 if (XEXP (orig, i) != NULL)
4774 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
4775 break;
4777 case 'E':
4778 case 'V':
4779 if (XVEC (orig, i) == orig_asm_constraints_vector)
4780 XVEC (copy, i) = copy_asm_constraints_vector;
4781 else if (XVEC (orig, i) == orig_asm_operands_vector)
4782 XVEC (copy, i) = copy_asm_operands_vector;
4783 else if (XVEC (orig, i) != NULL)
4785 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
4786 for (j = 0; j < XVECLEN (copy, i); j++)
4787 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
4789 break;
4791 case 't':
4792 case 'w':
4793 case 'i':
4794 case 's':
4795 case 'S':
4796 case 'u':
4797 case '0':
4798 /* These are left unchanged. */
4799 break;
4801 default:
4802 abort ();
4806 if (code == SCRATCH)
4808 i = copy_insn_n_scratches++;
4809 if (i >= MAX_RECOG_OPERANDS)
4810 abort ();
4811 copy_insn_scratch_in[i] = orig;
4812 copy_insn_scratch_out[i] = copy;
4814 else if (code == ASM_OPERANDS)
4816 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
4817 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
4818 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
4819 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
4822 return copy;
4825 /* Create a new copy of an rtx.
4826 This function differs from copy_rtx in that it handles SCRATCHes and
4827 ASM_OPERANDs properly.
4828 INSN doesn't really have to be a full INSN; it could be just the
4829 pattern. */
4831 copy_insn (insn)
4832 rtx insn;
4834 copy_insn_n_scratches = 0;
4835 orig_asm_operands_vector = 0;
4836 orig_asm_constraints_vector = 0;
4837 copy_asm_operands_vector = 0;
4838 copy_asm_constraints_vector = 0;
4839 return copy_insn_1 (insn);
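/* Illustrative sketch only -- a hypothetical helper following the
   recipe in the comment above copy_insn_1: copy an insn's pattern
   with copy_insn, then run copy_insn_1 over its REG_NOTES so that
   SCRATCHes in the notes stay identified with those in the copied
   pattern.  The copied notes are returned through *NOTES.  */

static rtx
example_copy_pattern_and_notes (insn, notes)
     rtx insn;
     rtx *notes;
{
  rtx pat = copy_insn (PATTERN (insn));

  *notes = REG_NOTES (insn) ? copy_insn_1 (REG_NOTES (insn)) : NULL_RTX;
  return pat;
}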
4842 /* Initialize data structures and variables in this file
4843 before generating rtl for each function. */
4845 void
4846 init_emit ()
4848 struct function *f = cfun;
4850 f->emit = (struct emit_status *) xmalloc (sizeof (struct emit_status));
4851 first_insn = NULL;
4852 last_insn = NULL;
4853 seq_rtl_expr = NULL;
4854 cur_insn_uid = 1;
4855 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
4856 last_linenum = 0;
4857 last_filename = 0;
4858 first_label_num = label_num;
4859 last_label_num = 0;
4860 seq_stack = NULL;
4862 clear_emit_caches ();
4864 /* Init the tables that describe all the pseudo regs. */
4866 f->emit->regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
4868 f->emit->regno_pointer_align
4869 = (unsigned char *) xcalloc (f->emit->regno_pointer_align_length,
4870 sizeof (unsigned char));
4872 regno_reg_rtx
4873 = (rtx *) xcalloc (f->emit->regno_pointer_align_length, sizeof (rtx));
4875 f->emit->regno_decl
4876 = (tree *) xcalloc (f->emit->regno_pointer_align_length, sizeof (tree));
4878 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
4879 init_virtual_regs (f->emit);
4881 /* Indicate that the virtual registers and stack locations are
4882 all pointers. */
4883 REG_POINTER (stack_pointer_rtx) = 1;
4884 REG_POINTER (frame_pointer_rtx) = 1;
4885 REG_POINTER (hard_frame_pointer_rtx) = 1;
4886 REG_POINTER (arg_pointer_rtx) = 1;
4888 REG_POINTER (virtual_incoming_args_rtx) = 1;
4889 REG_POINTER (virtual_stack_vars_rtx) = 1;
4890 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
4891 REG_POINTER (virtual_outgoing_args_rtx) = 1;
4892 REG_POINTER (virtual_cfa_rtx) = 1;
4894 #ifdef STACK_BOUNDARY
4895 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
4896 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
4897 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
4898 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
4900 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
4901 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
4902 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
4903 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
4904 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
4905 #endif
4907 #ifdef INIT_EXPANDERS
4908 INIT_EXPANDERS;
4909 #endif
4912 /* Mark SS for GC. */
4914 static void
4915 mark_sequence_stack (ss)
4916 struct sequence_stack *ss;
4918 while (ss)
4920 ggc_mark_rtx (ss->first);
4921 ggc_mark_tree (ss->sequence_rtl_expr);
4922 ss = ss->next;
4926 /* Mark ES for GC. */
4928 void
4929 mark_emit_status (es)
4930 struct emit_status *es;
4932 rtx *r;
4933 tree *t;
4934 int i;
4936 if (es == 0)
4937 return;
4939 for (i = es->regno_pointer_align_length, r = es->x_regno_reg_rtx,
4940 t = es->regno_decl;
4941 i > 0; --i, ++r, ++t)
4943 ggc_mark_rtx (*r);
4944 ggc_mark_tree (*t);
4947 mark_sequence_stack (es->sequence_stack);
4948 ggc_mark_tree (es->sequence_rtl_expr);
4949 ggc_mark_rtx (es->x_first_insn);
4952 /* Generate the all-zero constant vector for vector mode MODE. */
4954 static rtx
4955 gen_const_vector_0 (mode)
4956 enum machine_mode mode;
4958 rtx tem;
4959 rtvec v;
4960 int units, i;
4961 enum machine_mode inner;
4963 units = GET_MODE_NUNITS (mode);
4964 inner = GET_MODE_INNER (mode);
4966 v = rtvec_alloc (units);
4968 /* CONST0_RTX (inner) must already be set when this function is called. */
4969 if (!CONST0_RTX (inner))
4970 abort ();
4972 for (i = 0; i < units; ++i)
4973 RTVEC_ELT (v, i) = CONST0_RTX (inner);
4975 tem = gen_rtx_CONST_VECTOR (mode, v);
4976 return tem;
4979 /* Create some permanent unique rtl objects shared between all functions.
4980 LINE_NUMBERS is nonzero if line numbers are to be generated. */
4982 void
4983 init_emit_once (line_numbers)
4984 int line_numbers;
4986 int i;
4987 enum machine_mode mode;
4988 enum machine_mode double_mode;
4990 /* Initialize the CONST_INT, CONST_DOUBLE, and memory attribute hash
4991 tables. */
4992 const_int_htab = htab_create (37, const_int_htab_hash,
4993 const_int_htab_eq, NULL);
4994 ggc_add_deletable_htab (const_int_htab, 0, 0);
4996 const_double_htab = htab_create (37, const_double_htab_hash,
4997 const_double_htab_eq, NULL);
4998 ggc_add_deletable_htab (const_double_htab, 0, 0);
5000 mem_attrs_htab = htab_create (37, mem_attrs_htab_hash,
5001 mem_attrs_htab_eq, NULL);
5002 ggc_add_deletable_htab (mem_attrs_htab, 0, mem_attrs_mark);
5004 no_line_numbers = ! line_numbers;
5006 /* Compute the word and byte modes. */
5008 byte_mode = VOIDmode;
5009 word_mode = VOIDmode;
5010 double_mode = VOIDmode;
5012 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
5013 mode = GET_MODE_WIDER_MODE (mode))
5015 if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
5016 && byte_mode == VOIDmode)
5017 byte_mode = mode;
5019 if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
5020 && word_mode == VOIDmode)
5021 word_mode = mode;
5024 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
5025 mode = GET_MODE_WIDER_MODE (mode))
5027 if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
5028 && double_mode == VOIDmode)
5029 double_mode = mode;
5032 ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
5034 /* Assign register numbers to the globally defined register rtx.
5035 This must be done at runtime because the register number field
5036 is in a union and some compilers can't initialize unions. */
5038 pc_rtx = gen_rtx (PC, VOIDmode);
5039 cc0_rtx = gen_rtx (CC0, VOIDmode);
5040 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
5041 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
5042 if (hard_frame_pointer_rtx == 0)
5043 hard_frame_pointer_rtx = gen_raw_REG (Pmode,
5044 HARD_FRAME_POINTER_REGNUM);
5045 if (arg_pointer_rtx == 0)
5046 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
5047 virtual_incoming_args_rtx =
5048 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
5049 virtual_stack_vars_rtx =
5050 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
5051 virtual_stack_dynamic_rtx =
5052 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
5053 virtual_outgoing_args_rtx =
5054 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
5055 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
5057 /* These rtx must be roots if GC is enabled. */
5058 ggc_add_rtx_root (global_rtl, GR_MAX);
5060 #ifdef INIT_EXPANDERS
5061 /* This is to initialize {init|mark|free}_machine_status before the first
5062 call to push_function_context_to. This is needed by the Chill front
5063 end which calls push_function_context_to before the first call to
5064 init_function_start. */
5065 INIT_EXPANDERS;
5066 #endif
5068 /* Create the unique rtx's for certain rtx codes and operand values. */
5070 /* Don't use gen_rtx here since gen_rtx in this case
5071 tries to use these variables. */
5072 for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
5073 const_int_rtx[i + MAX_SAVED_CONST_INT] =
5074 gen_rtx_raw_CONST_INT (VOIDmode, i);
5075 ggc_add_rtx_root (const_int_rtx, 2 * MAX_SAVED_CONST_INT + 1);
5077 if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
5078 && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
5079 const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
5080 else
5081 const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
5083 REAL_VALUE_FROM_INT (dconst0, 0, 0, double_mode);
5084 REAL_VALUE_FROM_INT (dconst1, 1, 0, double_mode);
5085 REAL_VALUE_FROM_INT (dconst2, 2, 0, double_mode);
5086 REAL_VALUE_FROM_INT (dconstm1, -1, -1, double_mode);
5088 for (i = 0; i <= 2; i++)
5090 REAL_VALUE_TYPE *r =
5091 (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
5093 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
5094 mode = GET_MODE_WIDER_MODE (mode))
5095 const_tiny_rtx[i][(int) mode] =
5096 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5098 const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
5100 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
5101 mode = GET_MODE_WIDER_MODE (mode))
5102 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5104 for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT);
5105 mode != VOIDmode;
5106 mode = GET_MODE_WIDER_MODE (mode))
5107 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5110 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
5111 mode != VOIDmode;
5112 mode = GET_MODE_WIDER_MODE (mode))
5113 const_tiny_rtx[0][(int) mode] = gen_const_vector_0 (mode);
5115 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
5116 mode != VOIDmode;
5117 mode = GET_MODE_WIDER_MODE (mode))
5118 const_tiny_rtx[0][(int) mode] = gen_const_vector_0 (mode);
5120 for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
5121 if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
5122 const_tiny_rtx[0][i] = const0_rtx;
5124 const_tiny_rtx[0][(int) BImode] = const0_rtx;
5125 if (STORE_FLAG_VALUE == 1)
5126 const_tiny_rtx[1][(int) BImode] = const1_rtx;
5128 /* For bounded pointers, `&const_tiny_rtx[0][0]' is not the same as
5129 `(rtx *) const_tiny_rtx'. The former has bounds that only cover
5130 `const_tiny_rtx[0]', whereas the latter has bounds that cover all. */
5131 ggc_add_rtx_root ((rtx *) const_tiny_rtx, sizeof const_tiny_rtx / sizeof (rtx));
5132 ggc_add_rtx_root (&const_true_rtx, 1);
5134 #ifdef RETURN_ADDRESS_POINTER_REGNUM
5135 return_address_pointer_rtx
5136 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
5137 #endif
5139 #ifdef STRUCT_VALUE
5140 struct_value_rtx = STRUCT_VALUE;
5141 #else
5142 struct_value_rtx = gen_rtx_REG (Pmode, STRUCT_VALUE_REGNUM);
5143 #endif
5145 #ifdef STRUCT_VALUE_INCOMING
5146 struct_value_incoming_rtx = STRUCT_VALUE_INCOMING;
5147 #else
5148 #ifdef STRUCT_VALUE_INCOMING_REGNUM
5149 struct_value_incoming_rtx
5150 = gen_rtx_REG (Pmode, STRUCT_VALUE_INCOMING_REGNUM);
5151 #else
5152 struct_value_incoming_rtx = struct_value_rtx;
5153 #endif
5154 #endif
5156 #ifdef STATIC_CHAIN_REGNUM
5157 static_chain_rtx = gen_rtx_REG (Pmode, STATIC_CHAIN_REGNUM);
5159 #ifdef STATIC_CHAIN_INCOMING_REGNUM
5160 if (STATIC_CHAIN_INCOMING_REGNUM != STATIC_CHAIN_REGNUM)
5161 static_chain_incoming_rtx
5162 = gen_rtx_REG (Pmode, STATIC_CHAIN_INCOMING_REGNUM);
5163 else
5164 #endif
5165 static_chain_incoming_rtx = static_chain_rtx;
5166 #endif
5168 #ifdef STATIC_CHAIN
5169 static_chain_rtx = STATIC_CHAIN;
5171 #ifdef STATIC_CHAIN_INCOMING
5172 static_chain_incoming_rtx = STATIC_CHAIN_INCOMING;
5173 #else
5174 static_chain_incoming_rtx = static_chain_rtx;
5175 #endif
5176 #endif
5178 if (PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
5179 pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
5181 ggc_add_rtx_root (&pic_offset_table_rtx, 1);
5182 ggc_add_rtx_root (&struct_value_rtx, 1);
5183 ggc_add_rtx_root (&struct_value_incoming_rtx, 1);
5184 ggc_add_rtx_root (&static_chain_rtx, 1);
5185 ggc_add_rtx_root (&static_chain_incoming_rtx, 1);
5186 ggc_add_rtx_root (&return_address_pointer_rtx, 1);
5189 /* Query and clear/restore no_line_numbers. This is used by the
5190 switch/case handling in stmt.c to give proper line numbers in
5191 warnings about unreachable code. */
5194 force_line_numbers ()
5196 int old = no_line_numbers;
5198 no_line_numbers = 0;
5199 if (old)
5200 force_next_line_note ();
5201 return old;
5204 void
5205 restore_line_number_status (old_value)
5206 int old_value;
5208 no_line_numbers = old_value;
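/* Illustrative sketch only -- a hypothetical helper showing the
   pairing stmt.c relies on: turn line numbers on, emit a note, then
   restore the previous setting.  */

static void
example_with_forced_line_numbers (file, line)
     const char *file;
     int line;
{
  int old = force_line_numbers ();

  emit_line_note (file, line);
  restore_line_number_status (old);
}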
5211 /* Produce an exact duplicate of insn INSN after AFTER.
5212 Take care to update any libcall regions if present. */
5215 emit_copy_of_insn_after (insn, after)
5216 rtx insn, after;
5218 rtx new;
5219 rtx note1, note2, link;
5221 switch (GET_CODE (insn))
5223 case INSN:
5224 new = emit_insn_after (copy_insn (PATTERN (insn)), after);
5225 break;
5227 case JUMP_INSN:
5228 new = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
5229 break;
5231 case CALL_INSN:
5232 new = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
5233 if (CALL_INSN_FUNCTION_USAGE (insn))
5234 CALL_INSN_FUNCTION_USAGE (new)
5235 = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
5236 SIBLING_CALL_P (new) = SIBLING_CALL_P (insn);
5237 CONST_OR_PURE_CALL_P (new) = CONST_OR_PURE_CALL_P (insn);
5238 break;
5240 default:
5241 abort ();
5244 /* Update LABEL_NUSES. */
5245 mark_jump_label (PATTERN (new), new, 0);
5247 /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
5248 make them. */
5249 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
5250 if (REG_NOTE_KIND (link) != REG_LABEL)
5252 if (GET_CODE (link) == EXPR_LIST)
5253 REG_NOTES (new)
5254 = copy_insn_1 (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link),
5255 XEXP (link, 0),
5256 REG_NOTES (new)));
5257 else
5258 REG_NOTES (new)
5259 = copy_insn_1 (gen_rtx_INSN_LIST (REG_NOTE_KIND (link),
5260 XEXP (link, 0),
5261 REG_NOTES (new)));
5264 /* Fix the libcall sequences. */
5265 if ((note1 = find_reg_note (new, REG_RETVAL, NULL_RTX)) != NULL)
5267 rtx p = new;
5268 while ((note2 = find_reg_note (p, REG_LIBCALL, NULL_RTX)) == NULL)
5269 p = PREV_INSN (p);
5270 XEXP (note1, 0) = p;
5271 XEXP (note2, 0) = new;
5273 return new;
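/* Illustrative sketch only -- a hypothetical helper: duplicate INSN
   immediately after itself, a trivial use of the copier above.
   REG_NOTES, LABEL_NUSES and any libcall region are kept consistent
   by emit_copy_of_insn_after.  */

static rtx
example_duplicate_insn (insn)
     rtx insn;
{
  return emit_copy_of_insn_after (insn, insn);
}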