/* Emit RTL for the GNU C-Compiler expander.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/* Middle-to-low level generation of rtx code and insns.

   This file contains the functions `gen_rtx', `gen_reg_rtx'
   and `gen_label_rtx' that are the usual ways of creating rtl
   expressions for most purposes.

   It also has the functions for creating insns and linking
   them in the doubly-linked chain.

   The patterns of the insns are created by machine-dependent
   routines in insn-emit.c, which is generated automatically from
   the machine description.  These routines use `gen_rtx' to make
   the individual rtx's of the pattern; what is machine dependent
   is the kind of rtx's they make and what arguments they use.  */
#include "config.h"
#include "system.h"
#include "toplev.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "hashtab.h"
#include "insn-config.h"
#include "recog.h"
#include "real.h"
#include "obstack.h"
#include "bitmap.h"
#include "basic-block.h"
#include "ggc.h"
#include "debug.h"
#include "langhooks.h"
/* Commonly used modes.  */

enum machine_mode byte_mode;	/* Mode whose width is BITS_PER_UNIT.  */
enum machine_mode word_mode;	/* Mode whose width is BITS_PER_WORD.  */
enum machine_mode double_mode;	/* Mode whose width is DOUBLE_TYPE_SIZE.  */
enum machine_mode ptr_mode;	/* Mode whose width is POINTER_SIZE.  */

/* This is *not* reset after each function.  It gives each CODE_LABEL
   in the entire compilation a unique label number.  */

static int label_num = 1;

/* Highest label number in current function.
   Zero means use the value of label_num instead.
   This is nonzero only when belatedly compiling an inline function.  */

static int last_label_num;

/* Value label_num had when set_new_first_and_last_label_number was called.
   If label_num has not changed since then, last_label_num is valid.  */

static int base_label_num;

/* Nonzero means do not generate NOTEs for source line numbers.  */

static int no_line_numbers;
/* Commonly used rtx's, so that we only need space for one copy.
   These are initialized once for the entire compilation.
   All of these are unique; no other rtx-object will be equal to any
   of these.  */

rtx global_rtl[GR_MAX];

/* We record floating-point CONST_DOUBLEs in each floating-point mode for
   the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
   record a copy of const[012]_rtx.  */

rtx const_tiny_rtx[3][(int) MAX_MACHINE_MODE];

rtx const_true_rtx;

REAL_VALUE_TYPE dconst0;
REAL_VALUE_TYPE dconst1;
REAL_VALUE_TYPE dconst2;
REAL_VALUE_TYPE dconstm1;

/* All references to the following fixed hard registers go through
   these unique rtl objects.  On machines where the frame-pointer and
   arg-pointer are the same register, they use the same unique object.

   After register allocation, other rtl objects which used to be pseudo-regs
   may be clobbered to refer to the frame-pointer register.
   But references that were originally to the frame-pointer can be
   distinguished from the others because they contain frame_pointer_rtx.

   When to use frame_pointer_rtx and hard_frame_pointer_rtx is a little
   tricky: until register elimination has taken place hard_frame_pointer_rtx
   should be used if it is being set, and frame_pointer_rtx otherwise.  After
   register elimination hard_frame_pointer_rtx should always be used.
   On machines where the two registers are the same (most machines), these
   two rtx objects are also the same.

   In an inline procedure, the stack and frame pointer rtxs may not be
   used for anything else.  */
rtx struct_value_rtx;		/* (REG:Pmode STRUCT_VALUE_REGNUM) */
rtx struct_value_incoming_rtx;	/* (REG:Pmode STRUCT_VALUE_INCOMING_REGNUM) */
rtx static_chain_rtx;		/* (REG:Pmode STATIC_CHAIN_REGNUM) */
rtx static_chain_incoming_rtx;	/* (REG:Pmode STATIC_CHAIN_INCOMING_REGNUM) */
rtx pic_offset_table_rtx;	/* (REG:Pmode PIC_OFFSET_TABLE_REGNUM) */

/* This is used to implement __builtin_return_address for some machines.
   See for instance the MIPS port.  */
rtx return_address_pointer_rtx;	/* (REG:Pmode RETURN_ADDRESS_POINTER_REGNUM) */

/* We make one copy of (const_int C) where C is in
   [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
   to save space during the compilation and simplify comparisons of
   integers.  */

rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];

/* A hash table storing CONST_INTs whose absolute value is greater
   than MAX_SAVED_CONST_INT.  */

static htab_t const_int_htab;

/* A hash table storing memory attribute structures.  */
static htab_t mem_attrs_htab;

/* A hash table storing all CONST_DOUBLEs.  */
static htab_t const_double_htab;

#define first_insn (cfun->emit->x_first_insn)
#define last_insn (cfun->emit->x_last_insn)
#define cur_insn_uid (cfun->emit->x_cur_insn_uid)
#define last_linenum (cfun->emit->x_last_linenum)
#define last_filename (cfun->emit->x_last_filename)
#define first_label_num (cfun->emit->x_first_label_num)
static rtx make_jump_insn_raw		PARAMS ((rtx));
static rtx make_call_insn_raw		PARAMS ((rtx));
static rtx find_line_note		PARAMS ((rtx));
static void mark_sequence_stack		PARAMS ((struct sequence_stack *));
static rtx change_address_1		PARAMS ((rtx, enum machine_mode, rtx,
						 int));
static void unshare_all_rtl_1		PARAMS ((rtx));
static void unshare_all_decls		PARAMS ((tree));
static void reset_used_decls		PARAMS ((tree));
static void mark_label_nuses		PARAMS ((rtx));
static hashval_t const_int_htab_hash	PARAMS ((const void *));
static int const_int_htab_eq		PARAMS ((const void *,
						 const void *));
static hashval_t const_double_htab_hash	PARAMS ((const void *));
static int const_double_htab_eq		PARAMS ((const void *,
						 const void *));
static rtx lookup_const_double		PARAMS ((rtx));
static hashval_t mem_attrs_htab_hash	PARAMS ((const void *));
static int mem_attrs_htab_eq		PARAMS ((const void *,
						 const void *));
static void mem_attrs_mark		PARAMS ((const void *));
static mem_attrs *get_mem_attrs		PARAMS ((HOST_WIDE_INT, tree, rtx,
						 rtx, unsigned int,
						 enum machine_mode));
static tree component_ref_for_mem_expr	PARAMS ((tree));
static rtx gen_const_vector_0		PARAMS ((enum machine_mode));
/* Probability of the conditional branch currently processed by try_split.
   Set to -1 otherwise.  */
int split_branch_probability = -1;
/* Returns a hash code for X (which is really a CONST_INT).  */

static hashval_t
const_int_htab_hash (x)
     const void *x;
{
  return (hashval_t) INTVAL ((struct rtx_def *) x);
}

/* Returns non-zero if the value represented by X (which is really a
   CONST_INT) is the same as that given by Y (which is really a
   HOST_WIDE_INT *).  */

static int
const_int_htab_eq (x, y)
     const void *x;
     const void *y;
{
  return (INTVAL ((rtx) x) == *((const HOST_WIDE_INT *) y));
}
/* Returns a hash code for X (which is really a CONST_DOUBLE).  */

static hashval_t
const_double_htab_hash (x)
     const void *x;
{
  hashval_t h = 0;
  size_t i;
  rtx value = (rtx) x;

  for (i = 0; i < sizeof (CONST_DOUBLE_FORMAT) - 1; i++)
    h ^= XWINT (value, i);
  return h;
}

/* Returns non-zero if the value represented by X (really a CONST_DOUBLE)
   is the same as that represented by Y (really a CONST_DOUBLE).  */

static int
const_double_htab_eq (x, y)
     const void *x;
     const void *y;
{
  rtx a = (rtx) x, b = (rtx) y;
  size_t i;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  for (i = 0; i < sizeof (CONST_DOUBLE_FORMAT) - 1; i++)
    if (XWINT (a, i) != XWINT (b, i))
      return 0;

  return 1;
}
/* Returns a hash code for X (which is really a mem_attrs *).  */

static hashval_t
mem_attrs_htab_hash (x)
     const void *x;
{
  mem_attrs *p = (mem_attrs *) x;

  return (p->alias ^ (p->align * 1000)
	  ^ ((p->offset ? INTVAL (p->offset) : 0) * 50000)
	  ^ ((p->size ? INTVAL (p->size) : 0) * 2500000)
	  ^ (size_t) p->expr);
}

/* Returns non-zero if the value represented by X (which is really a
   mem_attrs *) is the same as that given by Y (which is also really a
   mem_attrs *).  */

static int
mem_attrs_htab_eq (x, y)
     const void *x;
     const void *y;
{
  mem_attrs *p = (mem_attrs *) x;
  mem_attrs *q = (mem_attrs *) y;

  return (p->alias == q->alias && p->expr == q->expr && p->offset == q->offset
	  && p->size == q->size && p->align == q->align);
}

/* This routine is called when we determine that we need a mem_attrs entry.
   It marks the associated decl and RTL as being used, if present.  */

static void
mem_attrs_mark (x)
     const void *x;
{
  mem_attrs *p = (mem_attrs *) x;

  if (p->expr)
    ggc_mark_tree (p->expr);

  if (p->offset)
    ggc_mark_rtx (p->offset);

  if (p->size)
    ggc_mark_rtx (p->size);
}
/* Allocate a new mem_attrs structure and insert it into the hash table if
   one identical to it is not already in the table.  We are doing this for
   MEM of mode MODE.  */

static mem_attrs *
get_mem_attrs (alias, expr, offset, size, align, mode)
     HOST_WIDE_INT alias;
     tree expr;
     rtx offset;
     rtx size;
     unsigned int align;
     enum machine_mode mode;
{
  mem_attrs attrs;
  void **slot;

  /* If everything is the default, we can just return zero.  */
  if (alias == 0 && expr == 0 && offset == 0
      && (size == 0
	  || (mode != BLKmode && GET_MODE_SIZE (mode) == INTVAL (size)))
      && (align == BITS_PER_UNIT
	  || (STRICT_ALIGNMENT
	      && mode != BLKmode && align == GET_MODE_ALIGNMENT (mode))))
    return 0;

  attrs.alias = alias;
  attrs.expr = expr;
  attrs.offset = offset;
  attrs.size = size;
  attrs.align = align;

  slot = htab_find_slot (mem_attrs_htab, &attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc (sizeof (mem_attrs));
      memcpy (*slot, &attrs, sizeof (mem_attrs));
    }

  return *slot;
}
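
/* For instance, a (mem:SI addr) with alias set 0, no expression, no
   offset, size (const_int 4) and BITS_PER_UNIT alignment matches all
   the defaults above, so get_mem_attrs returns 0 and the MEM carries
   no attribute structure at all.  */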
/* Generate a new REG rtx.  Make sure ORIGINAL_REGNO is set properly, and
   don't attempt to share with the various global pieces of rtl (such as
   frame_pointer_rtx).  */

rtx
gen_raw_REG (mode, regno)
     enum machine_mode mode;
     int regno;
{
  rtx x = gen_rtx_raw_REG (mode, regno);
  ORIGINAL_REGNO (x) = regno;
  return x;
}
/* There are some RTL codes that require special attention; the generation
   functions do the raw handling.  If you add to this list, modify
   special_rtx in gengenrtl.c as well.  */

rtx
gen_rtx_CONST_INT (mode, arg)
     enum machine_mode mode ATTRIBUTE_UNUSED;
     HOST_WIDE_INT arg;
{
  void **slot;

  if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
    return const_int_rtx[arg + MAX_SAVED_CONST_INT];

#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
  if (const_true_rtx && arg == STORE_FLAG_VALUE)
    return const_true_rtx;
#endif

  /* Look up the CONST_INT in the hash table.  */
  slot = htab_find_slot_with_hash (const_int_htab, &arg,
				   (hashval_t) arg, INSERT);
  if (*slot == 0)
    *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);

  return (rtx) *slot;
}
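
/* For illustration: because CONST_INTs are shared, pointer equality is
   enough to compare them.  E.g.,

	GEN_INT (0) == const0_rtx
	gen_rtx_CONST_INT (VOIDmode, 7) == GEN_INT (7)

   both hold: small values come straight from const_int_rtx[], and
   larger ones are uniquified through const_int_htab.  */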
rtx
gen_int_mode (c, mode)
     HOST_WIDE_INT c;
     enum machine_mode mode;
{
  return GEN_INT (trunc_int_for_mode (c, mode));
}
/* CONST_DOUBLEs might be created from pairs of integers, or from
   REAL_VALUE_TYPEs.  Also, their length is known only at run time,
   so we cannot use gen_rtx_raw_CONST_DOUBLE.  */

/* Determine whether REAL, a CONST_DOUBLE, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */

static rtx
lookup_const_double (real)
     rtx real;
{
  void **slot = htab_find_slot (const_double_htab, real, INSERT);
  if (*slot == 0)
    *slot = real;

  return (rtx) *slot;
}

/* Return a CONST_DOUBLE rtx for a floating-point value specified by
   VALUE in mode MODE.  */

rtx
const_double_from_real_value (value, mode)
     REAL_VALUE_TYPE value;
     enum machine_mode mode;
{
  rtx real = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (real, mode);

  memcpy (&CONST_DOUBLE_LOW (real), &value, sizeof (REAL_VALUE_TYPE));

  return lookup_const_double (real);
}
/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
   of ints: I0 is the low-order word and I1 is the high-order word.
   Do not use this routine for non-integer modes; convert to
   REAL_VALUE_TYPE and use CONST_DOUBLE_FROM_REAL_VALUE.  */

rtx
immed_double_const (i0, i1, mode)
     HOST_WIDE_INT i0, i1;
     enum machine_mode mode;
{
  rtx value;
  unsigned int i;

  if (mode != VOIDmode)
    {
      int width;
      if (GET_MODE_CLASS (mode) != MODE_INT
	  && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
	abort ();

      /* We clear out all bits that don't belong in MODE, unless they and
	 our sign bit are all one.  So we get either a reasonable negative
	 value or a reasonable unsigned value for this mode.  */
      width = GET_MODE_BITSIZE (mode);
      if (width < HOST_BITS_PER_WIDE_INT
	  && ((i0 & ((HOST_WIDE_INT) (-1) << (width - 1)))
	      != ((HOST_WIDE_INT) (-1) << (width - 1))))
	i0 &= ((HOST_WIDE_INT) 1 << width) - 1, i1 = 0;
      else if (width == HOST_BITS_PER_WIDE_INT
	       && ! (i1 == ~0 && i0 < 0))
	i1 = 0;
      else if (width > 2 * HOST_BITS_PER_WIDE_INT)
	/* We cannot represent this value as a constant.  */
	abort ();

      /* If this would be an entire word for the target, but is not for
	 the host, then sign-extend on the host so that the number will
	 look the same way on the host that it would on the target.

	 For example, when building a 64 bit alpha hosted 32 bit sparc
	 targeted compiler, then we want the 32 bit unsigned value -1 to be
	 represented as a 64 bit value -1, and not as 0x00000000ffffffff.
	 The latter confuses the sparc backend.  */

      if (width < HOST_BITS_PER_WIDE_INT
	  && (i0 & ((HOST_WIDE_INT) 1 << (width - 1))))
	i0 |= ((HOST_WIDE_INT) (-1) << width);

      /* If MODE fits within HOST_BITS_PER_WIDE_INT, always use a
	 CONST_INT.

	 ??? Strictly speaking, this is wrong if we create a CONST_INT for
	 a large unsigned constant with the size of MODE being
	 HOST_BITS_PER_WIDE_INT and later try to interpret that constant
	 in a wider mode.  In that case we will mis-interpret it as a
	 negative number.

	 Unfortunately, the only alternative is to make a CONST_DOUBLE for
	 any constant in any mode if it is an unsigned constant larger
	 than the maximum signed integer in an int on the host.  However,
	 doing this will break everyone that always expects to see a
	 CONST_INT for SImode and smaller.

	 We have always been making CONST_INTs in this case, so nothing
	 new is being broken.  */

      if (width <= HOST_BITS_PER_WIDE_INT)
	i1 = (i0 < 0) ? ~(HOST_WIDE_INT) 0 : 0;
    }

  /* If this integer fits in one word, return a CONST_INT.  */
  if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
    return GEN_INT (i0);

  /* We use VOIDmode for integers.  */
  value = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (value, VOIDmode);

  CONST_DOUBLE_LOW (value) = i0;
  CONST_DOUBLE_HIGH (value) = i1;

  for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
    XWINT (value, i) = 0;

  return lookup_const_double (value);
}
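
/* For illustration, on a host where HOST_BITS_PER_WIDE_INT == 32:

	immed_double_const (5, 0, DImode)

   fits in one word and simply returns (const_int 5), while

	immed_double_const (0, 1, DImode)

   needs both words, so it builds a VOIDmode CONST_DOUBLE with low word
   0 and high word 1 (the 64-bit value 1 << 32) and uniquifies it
   through lookup_const_double.  */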
rtx
gen_rtx_REG (mode, regno)
     enum machine_mode mode;
     unsigned int regno;
{
  /* In case the MD file explicitly references the frame pointer, have
     all such references point to the same frame pointer.  This is
     used during frame pointer elimination to distinguish the explicit
     references to these registers from pseudos that happened to be
     assigned to them.

     If we have eliminated the frame pointer or arg pointer, we will
     be using it as a normal register, for example as a spill
     register.  In such cases, we might be accessing it in a mode that
     is not Pmode and therefore cannot use the pre-allocated rtx.

     Also don't do this when we are making new REGs in reload, since
     we don't want to get confused with the real pointers.  */

  if (mode == Pmode && !reload_in_progress)
    {
      if (regno == FRAME_POINTER_REGNUM)
	return frame_pointer_rtx;
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
      if (regno == HARD_FRAME_POINTER_REGNUM)
	return hard_frame_pointer_rtx;
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && HARD_FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
      if (regno == ARG_POINTER_REGNUM)
	return arg_pointer_rtx;
#endif
#ifdef RETURN_ADDRESS_POINTER_REGNUM
      if (regno == RETURN_ADDRESS_POINTER_REGNUM)
	return return_address_pointer_rtx;
#endif
      if (regno == PIC_OFFSET_TABLE_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	return pic_offset_table_rtx;
      if (regno == STACK_POINTER_REGNUM)
	return stack_pointer_rtx;
    }

  return gen_raw_REG (mode, regno);
}
rtx
gen_rtx_MEM (mode, addr)
     enum machine_mode mode;
     rtx addr;
{
  rtx rt = gen_rtx_raw_MEM (mode, addr);

  /* This field is not cleared by the mere allocation of the rtx, so
     we clear it here.  */
  MEM_ATTRS (rt) = 0;

  return rt;
}
rtx
gen_rtx_SUBREG (mode, reg, offset)
     enum machine_mode mode;
     rtx reg;
     int offset;
{
  /* This is the most common failure type.
     Catch it early so we can see who does it.  */
  if ((offset % GET_MODE_SIZE (mode)) != 0)
    abort ();

  /* This check isn't usable right now because combine will
     throw arbitrary crap like a CALL into a SUBREG in
     gen_lowpart_for_combine so we must just eat it.  */
#if 0
  /* Check for this too.  */
  if (offset >= GET_MODE_SIZE (GET_MODE (reg)))
    abort ();
#endif
  return gen_rtx_raw_SUBREG (mode, reg, offset);
}
/* Generate a SUBREG representing the least-significant part of REG if MODE
   is smaller than the mode of REG; otherwise a paradoxical SUBREG.  */

rtx
gen_lowpart_SUBREG (mode, reg)
     enum machine_mode mode;
     rtx reg;
{
  enum machine_mode inmode;

  inmode = GET_MODE (reg);
  if (inmode == VOIDmode)
    inmode = mode;
  return gen_rtx_SUBREG (mode, reg,
			 subreg_lowpart_offset (mode, inmode));
}
/* rtx gen_rtx (code, mode, [element1, ..., elementn])
**
**	    This routine generates an RTX of the size specified by
**	<code>, which is an RTX code.  The RTX structure is initialized
**	from the arguments <element1> through <elementn>, which are
**	interpreted according to the specific RTX type's format.  The
**	special machine mode associated with the rtx (if any) is specified
**	in <mode>.
**
**	    gen_rtx can be invoked in a way which resembles the lisp-like
**	rtx it will generate.  For example, the following rtx structure:
**
**	      (plus:QI (mem:QI (reg:SI 1))
**		       (mem:QI (plus:SI (reg:SI 2) (reg:SI 3))))
**
**	    ...would be generated by the following C code:
**
**	      gen_rtx (PLUS, QImode,
**		  gen_rtx (MEM, QImode,
**		      gen_rtx (REG, SImode, 1)),
**		  gen_rtx (MEM, QImode,
**		      gen_rtx (PLUS, SImode,
**			  gen_rtx (REG, SImode, 2),
**			  gen_rtx (REG, SImode, 3)))),
*/
/*VARARGS2*/
rtx
gen_rtx VPARAMS ((enum rtx_code code, enum machine_mode mode, ...))
{
  int i;		/* Array indices...			*/
  const char *fmt;	/* Current rtx's format...		*/
  rtx rt_val;		/* RTX to return to caller...		*/

  VA_OPEN (p, mode);
  VA_FIXEDARG (p, enum rtx_code, code);
  VA_FIXEDARG (p, enum machine_mode, mode);

  switch (code)
    {
    case CONST_INT:
      rt_val = gen_rtx_CONST_INT (mode, va_arg (p, HOST_WIDE_INT));
      break;

    case CONST_DOUBLE:
      {
	HOST_WIDE_INT arg0 = va_arg (p, HOST_WIDE_INT);
	HOST_WIDE_INT arg1 = va_arg (p, HOST_WIDE_INT);

	rt_val = immed_double_const (arg0, arg1, mode);
      }
      break;

    case REG:
      rt_val = gen_rtx_REG (mode, va_arg (p, int));
      break;

    case MEM:
      rt_val = gen_rtx_MEM (mode, va_arg (p, rtx));
      break;

    default:
      rt_val = rtx_alloc (code);	/* Allocate the storage space.  */
      rt_val->mode = mode;		/* Store the machine mode...  */

      fmt = GET_RTX_FORMAT (code);	/* Find the right format...  */
      for (i = 0; i < GET_RTX_LENGTH (code); i++)
	{
	  switch (*fmt++)
	    {
	    case '0':		/* Unused field.  */
	      break;

	    case 'i':		/* An integer?  */
	      XINT (rt_val, i) = va_arg (p, int);
	      break;

	    case 'w':		/* A wide integer?  */
	      XWINT (rt_val, i) = va_arg (p, HOST_WIDE_INT);
	      break;

	    case 's':		/* A string?  */
	      XSTR (rt_val, i) = va_arg (p, char *);
	      break;

	    case 'e':		/* An expression?  */
	    case 'u':		/* An insn?  Same except when printing.  */
	      XEXP (rt_val, i) = va_arg (p, rtx);
	      break;

	    case 'E':		/* An RTX vector?  */
	      XVEC (rt_val, i) = va_arg (p, rtvec);
	      break;

	    case 'b':		/* A bitmap?  */
	      XBITMAP (rt_val, i) = va_arg (p, bitmap);
	      break;

	    case 't':		/* A tree?  */
	      XTREE (rt_val, i) = va_arg (p, tree);
	      break;

	    default:
	      abort ();
	    }
	}
      break;
    }

  VA_CLOSE (p);
  return rt_val;
}
/* gen_rtvec (n, [rt1, ..., rtn])
**
**	    This routine creates an rtvec and stores within it the
**	pointers to rtx's which are its arguments.
*/

/*VARARGS1*/
rtvec
gen_rtvec VPARAMS ((int n, ...))
{
  int i, save_n;
  rtx *vector;

  VA_OPEN (p, n);
  VA_FIXEDARG (p, int, n);

  if (n == 0)
    return NULL_RTVEC;		/* Don't allocate an empty rtvec...	*/

  vector = (rtx *) alloca (n * sizeof (rtx));

  for (i = 0; i < n; i++)
    vector[i] = va_arg (p, rtx);

  /* The definition of VA_* in K&R C causes `n' to go out of scope.  */
  save_n = n;
  VA_CLOSE (p);

  return gen_rtvec_v (save_n, vector);
}
rtvec
gen_rtvec_v (n, argp)
     int n;
     rtx *argp;
{
  int i;
  rtvec rt_val;

  if (n == 0)
    return NULL_RTVEC;		/* Don't allocate an empty rtvec...	*/

  rt_val = rtvec_alloc (n);	/* Allocate an rtvec...			*/

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}
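
/* For illustration, gen_rtvec (2, x, y) behaves like:

	rtx tmp[2];
	tmp[0] = x;
	tmp[1] = y;
	return gen_rtvec_v (2, tmp);  */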
/* Generate a REG rtx for a new pseudo register of mode MODE.
   This pseudo is assigned the next sequential register number.  */

rtx
gen_reg_rtx (mode)
     enum machine_mode mode;
{
  struct function *f = cfun;
  rtx val;

  /* Don't let anything called after initial flow analysis create new
     registers.  */
  if (no_new_pseudos)
    abort ();

  if (generating_concat_p
      && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
    {
      /* For complex modes, don't make a single pseudo.
	 Instead, make a CONCAT of two pseudos.
	 This allows noncontiguous allocation of the real and imaginary parts,
	 which makes much better code.  Besides, allocating DCmode
	 pseudos overstrains reload on some machines like the 386.  */
      rtx realpart, imagpart;
      int size = GET_MODE_UNIT_SIZE (mode);
      enum machine_mode partmode
	= mode_for_size (size * BITS_PER_UNIT,
			 (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
			  ? MODE_FLOAT : MODE_INT),
			 0);

      realpart = gen_reg_rtx (partmode);
      imagpart = gen_reg_rtx (partmode);
      return gen_rtx_CONCAT (mode, realpart, imagpart);
    }

  /* Make sure regno_pointer_align, regno_decl, and regno_reg_rtx are large
     enough to have an element for this pseudo reg number.  */

  if (reg_rtx_no == f->emit->regno_pointer_align_length)
    {
      int old_size = f->emit->regno_pointer_align_length;
      char *new;
      rtx *new1;
      tree *new2;

      new = xrealloc (f->emit->regno_pointer_align, old_size * 2);
      memset (new + old_size, 0, old_size);
      f->emit->regno_pointer_align = (unsigned char *) new;

      new1 = (rtx *) xrealloc (f->emit->x_regno_reg_rtx,
			       old_size * 2 * sizeof (rtx));
      memset (new1 + old_size, 0, old_size * sizeof (rtx));
      regno_reg_rtx = new1;

      new2 = (tree *) xrealloc (f->emit->regno_decl,
				old_size * 2 * sizeof (tree));
      memset (new2 + old_size, 0, old_size * sizeof (tree));
      f->emit->regno_decl = new2;

      f->emit->regno_pointer_align_length = old_size * 2;
    }

  val = gen_raw_REG (mode, reg_rtx_no);
  regno_reg_rtx[reg_rtx_no++] = val;
  return val;
}
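
/* For illustration: gen_reg_rtx (SImode) hands back (reg:SI N), where N
   is the value of reg_rtx_no before the call; with generating_concat_p
   set, gen_reg_rtx (DCmode) instead builds
   (concat:DC (reg:DF N) (reg:DF N+1)).  */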
/* Identify REG (which may be a CONCAT) as a user register.  */

void
mark_user_reg (reg)
     rtx reg;
{
  if (GET_CODE (reg) == CONCAT)
    {
      REG_USERVAR_P (XEXP (reg, 0)) = 1;
      REG_USERVAR_P (XEXP (reg, 1)) = 1;
    }
  else if (GET_CODE (reg) == REG)
    REG_USERVAR_P (reg) = 1;
  else
    abort ();
}
/* Identify REG as a probable pointer register and show its alignment
   as ALIGN, if nonzero.  */

void
mark_reg_pointer (reg, align)
     rtx reg;
     int align;
{
  if (! REG_POINTER (reg))
    {
      REG_POINTER (reg) = 1;

      if (align)
	REGNO_POINTER_ALIGN (REGNO (reg)) = align;
    }
  else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
    /* We can no longer be sure just how aligned this pointer is.  */
    REGNO_POINTER_ALIGN (REGNO (reg)) = align;
}
/* Return 1 plus largest pseudo reg number used in the current function.  */

int
max_reg_num ()
{
  return reg_rtx_no;
}

/* Return 1 + the largest label number used so far in the current function.  */

int
max_label_num ()
{
  if (last_label_num && label_num == base_label_num)
    return last_label_num;
  return label_num;
}

/* Return first label number used in this function (if any were used).  */

int
get_first_label_num ()
{
  return first_label_num;
}
/* Return the final regno of X, which is a SUBREG of a hard
   register.  */

unsigned int
subreg_hard_regno (x, check_mode)
     rtx x;
     int check_mode;
{
  enum machine_mode mode = GET_MODE (x);
  unsigned int byte_offset, base_regno, final_regno;
  rtx reg = SUBREG_REG (x);

  /* This is where we attempt to catch illegal subregs
     created by the compiler.  */
  if (GET_CODE (x) != SUBREG
      || GET_CODE (reg) != REG)
    abort ();
  base_regno = REGNO (reg);
  if (base_regno >= FIRST_PSEUDO_REGISTER)
    abort ();
  if (check_mode && ! HARD_REGNO_MODE_OK (base_regno, GET_MODE (reg)))
    abort ();

  /* Catch non-congruent offsets too.  */
  byte_offset = SUBREG_BYTE (x);
  if ((byte_offset % GET_MODE_SIZE (mode)) != 0)
    abort ();

  final_regno = subreg_regno (x);

  return final_regno;
}
/* Return a value representing some low-order bits of X, where the number
   of low-order bits is given by MODE.  Note that no conversion is done
   between floating-point and fixed-point values, rather, the bit
   representation is returned.

   This function handles the cases in common between gen_lowpart, below,
   and two variants in cse.c and combine.c.  These are the cases that can
   be safely handled at all points in the compilation.

   If this is not a case we can handle, return 0.  */

rtx
gen_lowpart_common (mode, x)
     enum machine_mode mode;
     rtx x;
{
  int msize = GET_MODE_SIZE (mode);
  int xsize = GET_MODE_SIZE (GET_MODE (x));
  int offset = 0;

  if (GET_MODE (x) == mode)
    return x;

  /* MODE must occupy no more words than the mode of X.  */
  if (GET_MODE (x) != VOIDmode
      && ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
	  > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
    return 0;

  offset = subreg_lowpart_offset (mode, GET_MODE (x));

  if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
      && (GET_MODE_CLASS (mode) == MODE_INT
	  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
    {
      /* If we are getting the low-order part of something that has been
	 sign- or zero-extended, we can either just use the object being
	 extended or make a narrower extension.  If we want an even smaller
	 piece than the size of the object being extended, call ourselves
	 recursively.

	 This case is used mostly by combine and cse.  */

      if (GET_MODE (XEXP (x, 0)) == mode)
	return XEXP (x, 0);
      else if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
	return gen_lowpart_common (mode, XEXP (x, 0));
      else if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (x)))
	return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
    }
  else if (GET_CODE (x) == SUBREG || GET_CODE (x) == REG
	   || GET_CODE (x) == CONCAT)
    return simplify_gen_subreg (mode, x, GET_MODE (x), offset);
  /* If X is a CONST_INT or a CONST_DOUBLE, extract the appropriate bits
     from the low-order part of the constant.  */
  else if ((GET_MODE_CLASS (mode) == MODE_INT
	    || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
	   && GET_MODE (x) == VOIDmode
	   && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
    {
      /* If MODE is twice the host word size, X is already the desired
	 representation.  Otherwise, if MODE is wider than a word, we can't
	 do this.  If MODE is exactly a word, return just one CONST_INT.  */

      if (GET_MODE_BITSIZE (mode) >= 2 * HOST_BITS_PER_WIDE_INT)
	return x;
      else if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
	return 0;
      else if (GET_MODE_BITSIZE (mode) == HOST_BITS_PER_WIDE_INT)
	return (GET_CODE (x) == CONST_INT ? x
		: GEN_INT (CONST_DOUBLE_LOW (x)));
      else
	{
	  /* MODE must be narrower than HOST_BITS_PER_WIDE_INT.  */
	  HOST_WIDE_INT val = (GET_CODE (x) == CONST_INT ? INTVAL (x)
			       : CONST_DOUBLE_LOW (x));

	  /* Sign extend to HOST_WIDE_INT.  */
	  val = trunc_int_for_mode (val, mode);

	  return (GET_CODE (x) == CONST_INT && INTVAL (x) == val ? x
		  : GEN_INT (val));
	}
    }

  /* The floating-point emulator can handle all conversions between
     FP and integer operands.  This simplifies reload because it
     doesn't have to deal with constructs like (subreg:DI
     (const_double:SF ...)) or (subreg:DF (const_int ...)).  */
  /* Single-precision floats are always 32-bits and double-precision
     floats are always 64-bits.  */

  else if (GET_MODE_CLASS (mode) == MODE_FLOAT
	   && GET_MODE_BITSIZE (mode) == 32
	   && GET_CODE (x) == CONST_INT)
    {
      REAL_VALUE_TYPE r;
      HOST_WIDE_INT i;

      i = INTVAL (x);
      r = REAL_VALUE_FROM_TARGET_SINGLE (i);
      return CONST_DOUBLE_FROM_REAL_VALUE (r, mode);
    }
  else if (GET_MODE_CLASS (mode) == MODE_FLOAT
	   && GET_MODE_BITSIZE (mode) == 64
	   && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE)
	   && GET_MODE (x) == VOIDmode)
    {
      REAL_VALUE_TYPE r;
      HOST_WIDE_INT i[2];
      HOST_WIDE_INT low, high;

      if (GET_CODE (x) == CONST_INT)
	{
	  low = INTVAL (x);
	  high = low >> (HOST_BITS_PER_WIDE_INT - 1);
	}
      else
	{
	  low = CONST_DOUBLE_LOW (x);
	  high = CONST_DOUBLE_HIGH (x);
	}

#if HOST_BITS_PER_WIDE_INT == 32
      /* REAL_VALUE_TARGET_DOUBLE takes the addressing order of the
	 target machine.  */
      if (WORDS_BIG_ENDIAN)
	i[0] = high, i[1] = low;
      else
	i[0] = low, i[1] = high;
#else
      i[0] = low;
#endif

      r = REAL_VALUE_FROM_TARGET_DOUBLE (i);
      return CONST_DOUBLE_FROM_REAL_VALUE (r, mode);
    }
  else if ((GET_MODE_CLASS (mode) == MODE_INT
	    || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
	   && GET_CODE (x) == CONST_DOUBLE
	   && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
    {
      REAL_VALUE_TYPE r;
      long i[4];  /* Only the low 32 bits of each 'long' are used.  */
      int endian = WORDS_BIG_ENDIAN ? 1 : 0;

      /* Convert 'r' into an array of four 32-bit words in target word
	 order.  */
      REAL_VALUE_FROM_CONST_DOUBLE (r, x);
      switch (GET_MODE_BITSIZE (GET_MODE (x)))
	{
	case 32:
	  REAL_VALUE_TO_TARGET_SINGLE (r, i[3 * endian]);
	  i[1] = 0;
	  i[2] = 0;
	  i[3 - 3 * endian] = 0;
	  break;
	case 64:
	  REAL_VALUE_TO_TARGET_DOUBLE (r, i + 2 * endian);
	  i[2 - 2 * endian] = 0;
	  i[3 - 2 * endian] = 0;
	  break;
	case 96:
	  REAL_VALUE_TO_TARGET_LONG_DOUBLE (r, i + endian);
	  i[3 - 3 * endian] = 0;
	  break;
	case 128:
	  REAL_VALUE_TO_TARGET_LONG_DOUBLE (r, i);
	  break;
	default:
	  abort ();
	}

      /* Now, pack the 32-bit elements of the array into a CONST_DOUBLE
	 and return it.  */
#if HOST_BITS_PER_WIDE_INT == 32
      return immed_double_const (i[3 * endian], i[1 + endian], mode);
#else
      if (HOST_BITS_PER_WIDE_INT != 64)
	abort ();

      return immed_double_const ((((unsigned long) i[3 * endian])
				  | ((HOST_WIDE_INT) i[1 + endian] << 32)),
				 (((unsigned long) i[2 - endian])
				  | ((HOST_WIDE_INT) i[3 - 3 * endian] << 32)),
				 mode);
#endif
    }

  /* Otherwise, we can't do this.  */
  return 0;
}
/* Return the real part (which has mode MODE) of a complex value X.
   This always comes at the low address in memory.  */

rtx
gen_realpart (mode, x)
     enum machine_mode mode;
     rtx x;
{
  if (WORDS_BIG_ENDIAN
      && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
      && REG_P (x)
      && REGNO (x) < FIRST_PSEUDO_REGISTER)
    internal_error
      ("can't access real part of complex value in hard register");
  else if (WORDS_BIG_ENDIAN)
    return gen_highpart (mode, x);
  else
    return gen_lowpart (mode, x);
}

/* Return the imaginary part (which has mode MODE) of a complex value X.
   This always comes at the high address in memory.  */

rtx
gen_imagpart (mode, x)
     enum machine_mode mode;
     rtx x;
{
  if (WORDS_BIG_ENDIAN)
    return gen_lowpart (mode, x);
  else if (! WORDS_BIG_ENDIAN
	   && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
	   && REG_P (x)
	   && REGNO (x) < FIRST_PSEUDO_REGISTER)
    internal_error
      ("can't access imaginary part of complex value in hard register");
  else
    return gen_highpart (mode, x);
}
/* Return 1 iff X, assumed to be a SUBREG,
   refers to the real part of the complex value in its containing reg.
   Complex values are always stored with the real part in the first word,
   regardless of WORDS_BIG_ENDIAN.  */

int
subreg_realpart_p (x)
     rtx x;
{
  if (GET_CODE (x) != SUBREG)
    abort ();

  return ((unsigned int) SUBREG_BYTE (x)
	  < GET_MODE_UNIT_SIZE (GET_MODE (SUBREG_REG (x))));
}
/* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a value,
   return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
   least-significant part of X.
   MODE specifies how big a part of X to return;
   it usually should not be larger than a word.
   If X is a MEM whose address is a QUEUED, the value may be so also.  */

rtx
gen_lowpart (mode, x)
     enum machine_mode mode;
     rtx x;
{
  rtx result = gen_lowpart_common (mode, x);

  if (result)
    return result;
  else if (GET_CODE (x) == REG)
    {
      /* Must be a hard reg that's not valid in MODE.  */
      result = gen_lowpart_common (mode, copy_to_reg (x));
      if (result == 0)
	abort ();
      return result;
    }
  else if (GET_CODE (x) == MEM)
    {
      /* The only additional case we can do is MEM.  */
      int offset = 0;
      if (WORDS_BIG_ENDIAN)
	offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
		  - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));

      if (BYTES_BIG_ENDIAN)
	/* Adjust the address so that the address-after-the-data
	   is unchanged.  */
	offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
		   - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));

      return adjust_address (x, mode, offset);
    }
  else if (GET_CODE (x) == ADDRESSOF)
    return gen_lowpart (mode, force_reg (GET_MODE (x), x));
  else
    abort ();
}
/* Like `gen_lowpart', but refer to the most significant part.
   This is used to access the imaginary part of a complex number.  */

rtx
gen_highpart (mode, x)
     enum machine_mode mode;
     rtx x;
{
  unsigned int msize = GET_MODE_SIZE (mode);
  rtx result;

  /* This case loses if X is a subreg.  To catch bugs early,
     complain if an invalid MODE is used even in other cases.  */
  if (msize > UNITS_PER_WORD
      && msize != GET_MODE_UNIT_SIZE (GET_MODE (x)))
    abort ();

  result = simplify_gen_subreg (mode, x, GET_MODE (x),
				subreg_highpart_offset (mode, GET_MODE (x)));

  /* simplify_gen_subreg is not guaranteed to return a valid operand for
     the target if we have a MEM.  gen_highpart must return a valid operand,
     emitting code if necessary to do so.  */
  if (result != NULL_RTX && GET_CODE (result) == MEM)
    result = validize_mem (result);

  if (!result)
    abort ();
  return result;
}
/* Like gen_highpart, but accept mode of EXP operand in case EXP can
   be a VOIDmode constant.  */

rtx
gen_highpart_mode (outermode, innermode, exp)
     enum machine_mode outermode, innermode;
     rtx exp;
{
  if (GET_MODE (exp) != VOIDmode)
    {
      if (GET_MODE (exp) != innermode)
	abort ();
      return gen_highpart (outermode, exp);
    }
  return simplify_gen_subreg (outermode, exp, innermode,
			      subreg_highpart_offset (outermode, innermode));
}
/* Return offset in bytes to get OUTERMODE low part
   of the value in mode INNERMODE stored in memory in target format.  */

unsigned int
subreg_lowpart_offset (outermode, innermode)
     enum machine_mode outermode, innermode;
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  return offset;
}

/* Return offset in bytes to get OUTERMODE high part
   of the value in mode INNERMODE stored in memory in target format.  */

unsigned int
subreg_highpart_offset (outermode, innermode)
     enum machine_mode outermode, innermode;
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  if (GET_MODE_SIZE (innermode) < GET_MODE_SIZE (outermode))
    abort ();

  if (difference > 0)
    {
      if (! WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (! BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  return offset;
}
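
/* Worked example, assuming UNITS_PER_WORD == 4: for innermode == DImode
   and outermode == QImode, difference == 7.  On a little-endian target
   (neither WORDS_BIG_ENDIAN nor BYTES_BIG_ENDIAN) the low part is at
   offset 0 and the high part at offset 4 + 3 == 7; on a big-endian
   target the two offsets are reversed.  */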
/* Return 1 iff X, assumed to be a SUBREG,
   refers to the least significant part of its containing reg.
   If X is not a SUBREG, always return 1 (it is its own low part!).  */

int
subreg_lowpart_p (x)
     rtx x;
{
  if (GET_CODE (x) != SUBREG)
    return 1;
  else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
    return 0;

  return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
	  == SUBREG_BYTE (x));
}
/* Helper routine for all the constant cases of operand_subword.
   Some places invoke this directly.  */

rtx
constant_subword (op, offset, mode)
     rtx op;
     int offset;
     enum machine_mode mode;
{
  int size_ratio = HOST_BITS_PER_WIDE_INT / BITS_PER_WORD;
  HOST_WIDE_INT val;

  /* If OP is already an integer word, return it.  */
  if (GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_SIZE (mode) == UNITS_PER_WORD)
    return op;

  /* The output is some bits, the width of the target machine's word.
     A wider-word host can surely hold them in a CONST_INT.  A narrower-word
     host can't.  */
  if (HOST_BITS_PER_WIDE_INT >= BITS_PER_WORD
      && GET_MODE_CLASS (mode) == MODE_FLOAT
      && GET_MODE_BITSIZE (mode) == 64
      && GET_CODE (op) == CONST_DOUBLE)
    {
      long k[2];
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_DOUBLE (rv, k);

      /* We handle 32-bit and >= 64-bit words here.  Note that the order in
	 which the words are written depends on the word endianness.
	 ??? This is a potential portability problem and should
	 be fixed at some point.

	 We must exercise caution with the sign bit.  By definition there
	 are 32 significant bits in K; there may be more in a HOST_WIDE_INT.
	 Consider a host with a 32-bit long and a 64-bit HOST_WIDE_INT.
	 So we explicitly mask and sign-extend as necessary.  */
      if (BITS_PER_WORD == 32)
	{
	  val = k[offset];
	  val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;
	  return GEN_INT (val);
	}
#if HOST_BITS_PER_WIDE_INT >= 64
      else if (BITS_PER_WORD >= 64 && offset == 0)
	{
	  val = k[! WORDS_BIG_ENDIAN];
	  val = (((val & 0xffffffff) ^ 0x80000000) - 0x80000000) << 32;
	  val |= (HOST_WIDE_INT) k[WORDS_BIG_ENDIAN] & 0xffffffff;
	  return GEN_INT (val);
	}
#endif
      else if (BITS_PER_WORD == 16)
	{
	  val = k[offset >> 1];
	  if ((offset & 1) == ! WORDS_BIG_ENDIAN)
	    val >>= 16;
	  val = ((val & 0xffff) ^ 0x8000) - 0x8000;
	  return GEN_INT (val);
	}
      else
	abort ();
    }
  else if (HOST_BITS_PER_WIDE_INT >= BITS_PER_WORD
	   && GET_MODE_CLASS (mode) == MODE_FLOAT
	   && GET_MODE_BITSIZE (mode) > 64
	   && GET_CODE (op) == CONST_DOUBLE)
    {
      long k[4];
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);

      if (BITS_PER_WORD == 32)
	{
	  val = k[offset];
	  val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;
	  return GEN_INT (val);
	}
#if HOST_BITS_PER_WIDE_INT >= 64
      else if (BITS_PER_WORD >= 64 && offset <= 1)
	{
	  val = k[offset * 2 + ! WORDS_BIG_ENDIAN];
	  val = (((val & 0xffffffff) ^ 0x80000000) - 0x80000000) << 32;
	  val |= (HOST_WIDE_INT) k[offset * 2 + WORDS_BIG_ENDIAN] & 0xffffffff;
	  return GEN_INT (val);
	}
#endif
      else
	abort ();
    }

  /* Single word float is a little harder, since single- and double-word
     values often do not have the same high-order bits.  We have already
     verified that we want the only defined word of the single-word value.  */
  if (GET_MODE_CLASS (mode) == MODE_FLOAT
      && GET_MODE_BITSIZE (mode) == 32
      && GET_CODE (op) == CONST_DOUBLE)
    {
      long l;
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_SINGLE (rv, l);

      /* Sign extend from known 32-bit value to HOST_WIDE_INT.  */
      val = l;
      val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;

      if (BITS_PER_WORD == 16)
	{
	  if ((offset & 1) == ! WORDS_BIG_ENDIAN)
	    val >>= 16;
	  val = ((val & 0xffff) ^ 0x8000) - 0x8000;
	}

      return GEN_INT (val);
    }

  /* The only remaining cases that we can handle are integers.
     Convert to proper endianness now since these cases need it.
     At this point, offset == 0 means the low-order word.

     We do not want to handle the case when BITS_PER_WORD <= HOST_BITS_PER_INT
     in general.  However, if OP is (const_int 0), we can just return
     it for any word.  */

  if (op == const0_rtx)
    return op;

  if (GET_MODE_CLASS (mode) != MODE_INT
      || (GET_CODE (op) != CONST_INT && GET_CODE (op) != CONST_DOUBLE)
      || BITS_PER_WORD > HOST_BITS_PER_WIDE_INT)
    return 0;

  if (WORDS_BIG_ENDIAN)
    offset = GET_MODE_SIZE (mode) / UNITS_PER_WORD - 1 - offset;

  /* Find out which word on the host machine this value is in and get
     it from the constant.  */
  val = (offset / size_ratio == 0
	 ? (GET_CODE (op) == CONST_INT ? INTVAL (op) : CONST_DOUBLE_LOW (op))
	 : (GET_CODE (op) == CONST_INT
	    ? (INTVAL (op) < 0 ? ~0 : 0) : CONST_DOUBLE_HIGH (op)));

  /* Get the value we want into the low bits of val.  */
  if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT)
    val = ((val >> ((offset % size_ratio) * BITS_PER_WORD)));

  val = trunc_int_for_mode (val, word_mode);

  return GEN_INT (val);
}
/* Return subword OFFSET of operand OP.
   The word number, OFFSET, is interpreted as the word number starting
   at the low-order address.  OFFSET 0 is the low-order word if not
   WORDS_BIG_ENDIAN, otherwise it is the high-order word.

   If we cannot extract the required word, we return zero.  Otherwise,
   an rtx corresponding to the requested word will be returned.

   VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
   reload has completed, a valid address will always be returned.  After
   reload, if a valid address cannot be returned, we return zero.

   If VALIDATE_ADDRESS is zero, we simply form the required address; validating
   it is the responsibility of the caller.

   MODE is the mode of OP in case it is a CONST_INT.

   ??? This is still rather broken for some cases.  The problem for the
   moment is that all callers of this thing provide no 'goal mode' to
   tell us to work with.  This exists because all callers were written
   in a word based SUBREG world.
   Now use of this function can be deprecated by simplify_subreg in most
   cases.  */

rtx
operand_subword (op, offset, validate_address, mode)
     rtx op;
     unsigned int offset;
     int validate_address;
     enum machine_mode mode;
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  if (mode == VOIDmode)
    abort ();

  /* If OP is narrower than a word, fail.  */
  if (mode != BLKmode
      && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
    return 0;

  /* If we want a word outside OP, return zero.  */
  if (mode != BLKmode
      && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
    return const0_rtx;

  /* Form a new MEM at the requested address.  */
  if (GET_CODE (op) == MEM)
    {
      rtx new = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);

      if (! validate_address)
	return new;

      else if (reload_completed)
	{
	  if (! strict_memory_address_p (word_mode, XEXP (new, 0)))
	    return 0;
	}
      else
	return replace_equiv_address (new, XEXP (new, 0));
    }

  /* Rest can be handled by simplify_subreg.  */
  return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
}
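
/* For illustration, with 4-byte words: operand_subword applied to a
   DImode MEM at address A with OFFSET == 1 yields a word_mode MEM at
   A + 4.  OFFSET counts words from the low address, so whether that is
   the low or high half of the DImode value depends on
   WORDS_BIG_ENDIAN.  */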
/* Similar to `operand_subword', but never return 0.  If we can't extract
   the required subword, put OP into a register and try again.  If that fails,
   abort.  We always validate the address in this case.

   MODE is the mode of OP, in case it is CONST_INT.  */

rtx
operand_subword_force (op, offset, mode)
     rtx op;
     unsigned int offset;
     enum machine_mode mode;
{
  rtx result = operand_subword (op, offset, 1, mode);

  if (result)
    return result;

  if (mode != BLKmode && mode != VOIDmode)
    {
      /* If this is a register which can not be accessed by words, copy it
	 to a pseudo register.  */
      if (GET_CODE (op) == REG)
	op = copy_to_reg (op);
      else
	op = force_reg (mode, op);
    }

  result = operand_subword (op, offset, 1, mode);
  if (result == 0)
    abort ();

  return result;
}
/* Given a compare instruction, swap the operands.
   A test instruction is changed into a compare of 0 against the operand.  */

void
reverse_comparison (insn)
     rtx insn;
{
  rtx body = PATTERN (insn);
  rtx comp;

  if (GET_CODE (body) == SET)
    comp = SET_SRC (body);
  else
    comp = SET_SRC (XVECEXP (body, 0, 0));

  if (GET_CODE (comp) == COMPARE)
    {
      rtx op0 = XEXP (comp, 0);
      rtx op1 = XEXP (comp, 1);
      XEXP (comp, 0) = op1;
      XEXP (comp, 1) = op0;
    }
  else
    {
      rtx new = gen_rtx_COMPARE (VOIDmode,
				 CONST0_RTX (GET_MODE (comp)), comp);
      if (GET_CODE (body) == SET)
	SET_SRC (body) = new;
      else
	SET_SRC (XVECEXP (body, 0, 0)) = new;
    }
}
/* Within a MEM_EXPR, we care about either (1) a component ref of a decl,
   or (2) a component ref of something variable.  Represent the latter with
   a NULL expression.  */

static tree
component_ref_for_mem_expr (ref)
     tree ref;
{
  tree inner = TREE_OPERAND (ref, 0);

  if (TREE_CODE (inner) == COMPONENT_REF)
    inner = component_ref_for_mem_expr (inner);
  else
    {
      tree placeholder_ptr = 0;

      /* Now remove any conversions: they don't change what the underlying
	 object is.  Likewise for SAVE_EXPR.  Also handle PLACEHOLDER_EXPR.  */
      while (TREE_CODE (inner) == NOP_EXPR || TREE_CODE (inner) == CONVERT_EXPR
	     || TREE_CODE (inner) == NON_LVALUE_EXPR
	     || TREE_CODE (inner) == VIEW_CONVERT_EXPR
	     || TREE_CODE (inner) == SAVE_EXPR
	     || TREE_CODE (inner) == PLACEHOLDER_EXPR)
	if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
	  inner = find_placeholder (inner, &placeholder_ptr);
	else
	  inner = TREE_OPERAND (inner, 0);

      if (! DECL_P (inner))
	inner = NULL_TREE;
    }

  if (inner == TREE_OPERAND (ref, 0))
    return ref;
  else
    return build (COMPONENT_REF, TREE_TYPE (ref), inner,
		  TREE_OPERAND (ref, 1));
}
/* Given REF, a MEM, and T, either the type of X or the expression
   corresponding to REF, set the memory attributes.  OBJECTP is nonzero
   if we are making a new object of this type.  */

void
set_mem_attributes (ref, t, objectp)
     rtx ref;
     tree t;
     int objectp;
{
  HOST_WIDE_INT alias = MEM_ALIAS_SET (ref);
  tree expr = MEM_EXPR (ref);
  rtx offset = MEM_OFFSET (ref);
  rtx size = MEM_SIZE (ref);
  unsigned int align = MEM_ALIGN (ref);
  tree type;

  /* It can happen that type_for_mode was given a mode for which there
     is no language-level type, in which case it returns NULL and we
     can see that here.  */
  if (t == NULL_TREE)
    return;

  type = TYPE_P (t) ? t : TREE_TYPE (t);

  /* If we have already set DECL_RTL = ref, get_alias_set will get the
     wrong answer, as it assumes that DECL_RTL already has the right alias
     info.  Callers should not set DECL_RTL until after the call to
     set_mem_attributes.  */
  if (DECL_P (t) && ref == DECL_RTL_IF_SET (t))
    abort ();

  /* Get the alias set from the expression or type (perhaps using a
     front-end routine) and use it.  */
  alias = get_alias_set (t);

  MEM_VOLATILE_P (ref) = TYPE_VOLATILE (type);
  MEM_IN_STRUCT_P (ref) = AGGREGATE_TYPE_P (type);
  RTX_UNCHANGING_P (ref)
    |= ((lang_hooks.honor_readonly
	 && (TYPE_READONLY (type) || TREE_READONLY (t)))
	|| (! TYPE_P (t) && TREE_CONSTANT (t)));

  /* If we are making an object of this type, or if this is a DECL, we know
     that it is a scalar if the type is not an aggregate.  */
  if ((objectp || DECL_P (t)) && ! AGGREGATE_TYPE_P (type))
    MEM_SCALAR_P (ref) = 1;

  /* We can set the alignment from the type if we are making an object,
     this is an INDIRECT_REF, or if TYPE_ALIGN_OK.  */
  if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
    align = MAX (align, TYPE_ALIGN (type));

  /* If the size is known, we can set that.  */
  if (TYPE_SIZE_UNIT (type) && host_integerp (TYPE_SIZE_UNIT (type), 1))
    size = GEN_INT (tree_low_cst (TYPE_SIZE_UNIT (type), 1));

  /* If T is not a type, we may be able to deduce some more information about
     the expression.  */
  if (! TYPE_P (t))
    {
      maybe_set_unchanging (ref, t);
      if (TREE_THIS_VOLATILE (t))
	MEM_VOLATILE_P (ref) = 1;

      /* Now remove any conversions: they don't change what the underlying
	 object is.  Likewise for SAVE_EXPR.  */
      while (TREE_CODE (t) == NOP_EXPR || TREE_CODE (t) == CONVERT_EXPR
	     || TREE_CODE (t) == NON_LVALUE_EXPR
	     || TREE_CODE (t) == VIEW_CONVERT_EXPR
	     || TREE_CODE (t) == SAVE_EXPR)
	t = TREE_OPERAND (t, 0);

      /* If this expression can't be addressed (e.g., it contains a reference
	 to a non-addressable field), show we don't change its alias set.  */
      if (! can_address_p (t))
	MEM_KEEP_ALIAS_SET_P (ref) = 1;

      /* If this is a decl, set the attributes of the MEM from it.  */
      if (DECL_P (t))
	{
	  expr = t;
	  offset = const0_rtx;
	  size = (DECL_SIZE_UNIT (t)
		  && host_integerp (DECL_SIZE_UNIT (t), 1)
		  ? GEN_INT (tree_low_cst (DECL_SIZE_UNIT (t), 1)) : 0);
	  align = DECL_ALIGN (t);
	}

      /* If this is a constant, we know the alignment.  */
      else if (TREE_CODE_CLASS (TREE_CODE (t)) == 'c')
	{
	  align = TYPE_ALIGN (type);
#ifdef CONSTANT_ALIGNMENT
	  align = CONSTANT_ALIGNMENT (t, align);
#endif
	}

      /* If this is a field reference and not a bit-field, record it.  */
      /* ??? There is some information that can be gleaned from bit-fields,
	 such as the word offset in the structure that might be modified.
	 But skip it for now.  */
      else if (TREE_CODE (t) == COMPONENT_REF
	       && ! DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
	{
	  expr = component_ref_for_mem_expr (t);
	  offset = const0_rtx;
	  /* ??? Any reason the field size would be different than
	     the size we got from the type?  */
	}

      /* If this is an array reference, look for an outer field reference.  */
      else if (TREE_CODE (t) == ARRAY_REF)
	{
	  tree off_tree = size_zero_node;

	  do
	    {
	      off_tree
		= fold (build (PLUS_EXPR, sizetype,
			       fold (build (MULT_EXPR, sizetype,
					    TREE_OPERAND (t, 1),
					    TYPE_SIZE_UNIT (TREE_TYPE (t)))),
			       off_tree));
	      t = TREE_OPERAND (t, 0);
	    }
	  while (TREE_CODE (t) == ARRAY_REF);

	  if (TREE_CODE (t) == COMPONENT_REF)
	    {
	      expr = component_ref_for_mem_expr (t);
	      if (host_integerp (off_tree, 1))
		offset = GEN_INT (tree_low_cst (off_tree, 1));
	      /* ??? Any reason the field size would be different than
		 the size we got from the type?  */
	    }
	}
    }

  /* Now set the attributes we computed above.  */
  MEM_ATTRS (ref)
    = get_mem_attrs (alias, expr, offset, size, align, GET_MODE (ref));

  /* If this is already known to be a scalar or aggregate, we are done.  */
  if (MEM_IN_STRUCT_P (ref) || MEM_SCALAR_P (ref))
    return;

  /* If it is a reference into an aggregate, this is part of an aggregate.
     Otherwise we don't know.  */
  else if (TREE_CODE (t) == COMPONENT_REF || TREE_CODE (t) == ARRAY_REF
	   || TREE_CODE (t) == ARRAY_RANGE_REF
	   || TREE_CODE (t) == BIT_FIELD_REF)
    MEM_IN_STRUCT_P (ref) = 1;
}
/* Set the alias set of MEM to SET.  */

void
set_mem_alias_set (mem, set)
     rtx mem;
     HOST_WIDE_INT set;
{
#ifdef ENABLE_CHECKING
  /* If the new and old alias sets don't conflict, something is wrong.  */
  if (!alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)))
    abort ();
#endif

  MEM_ATTRS (mem) = get_mem_attrs (set, MEM_EXPR (mem), MEM_OFFSET (mem),
				   MEM_SIZE (mem), MEM_ALIGN (mem),
				   GET_MODE (mem));
}

/* Set the alignment of MEM to ALIGN bits.  */

void
set_mem_align (mem, align)
     rtx mem;
     unsigned int align;
{
  MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
				   MEM_OFFSET (mem), MEM_SIZE (mem), align,
				   GET_MODE (mem));
}

/* Set the expr for MEM to EXPR.  */

void
set_mem_expr (mem, expr)
     rtx mem;
     tree expr;
{
  MEM_ATTRS (mem)
    = get_mem_attrs (MEM_ALIAS_SET (mem), expr, MEM_OFFSET (mem),
		     MEM_SIZE (mem), MEM_ALIGN (mem), GET_MODE (mem));
}

/* Set the offset of MEM to OFFSET.  */

void
set_mem_offset (mem, offset)
     rtx mem, offset;
{
  MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
				   offset, MEM_SIZE (mem), MEM_ALIGN (mem),
				   GET_MODE (mem));
}
/* Return a memory reference like MEMREF, but with its mode changed to MODE
   and its address changed to ADDR.  (VOIDmode means don't change the mode.
   NULL for ADDR means don't change the address.)  VALIDATE is nonzero if the
   returned memory location is required to be valid.  The memory
   attributes are not changed.  */

static rtx
change_address_1 (memref, mode, addr, validate)
     rtx memref;
     enum machine_mode mode;
     rtx addr;
     int validate;
{
  rtx new;

  if (GET_CODE (memref) != MEM)
    abort ();
  if (mode == VOIDmode)
    mode = GET_MODE (memref);
  if (addr == 0)
    addr = XEXP (memref, 0);

  if (validate)
    {
      if (reload_in_progress || reload_completed)
	{
	  if (! memory_address_p (mode, addr))
	    abort ();
	}
      else
	addr = memory_address (mode, addr);
    }

  if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
    return memref;

  new = gen_rtx_MEM (mode, addr);
  MEM_COPY_ATTRIBUTES (new, memref);
  return new;
}
/* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
   way we are changing MEMREF, so we only preserve the alias set.  */

rtx
change_address (memref, mode, addr)
     rtx memref;
     enum machine_mode mode;
     rtx addr;
{
  rtx new = change_address_1 (memref, mode, addr, 1);
  enum machine_mode mmode = GET_MODE (new);

  MEM_ATTRS (new)
    = get_mem_attrs (MEM_ALIAS_SET (memref), 0, 0,
		     mmode == BLKmode ? 0 : GEN_INT (GET_MODE_SIZE (mmode)),
		     (mmode == BLKmode ? BITS_PER_UNIT
		      : GET_MODE_ALIGNMENT (mmode)),
		     mmode);

  return new;
}
1939 /* Return a memory reference like MEMREF, but with its mode changed
1940 to MODE and its address offset by OFFSET bytes. If VALIDATE is
1941 nonzero, the memory address is forced to be valid.
1942 If ADJUST is zero, OFFSET is only used to update MEM_ATTRS
1943 and the caller is responsible for adjusting the MEMREF base register. */
1946 adjust_address_1 (memref, mode, offset, validate, adjust)
1947 rtx memref;
1948 enum machine_mode mode;
1949 HOST_WIDE_INT offset;
1950 int validate, adjust;
1952 rtx addr = XEXP (memref, 0);
1953 rtx new;
1954 rtx memoffset = MEM_OFFSET (memref);
1955 rtx size = 0;
1956 unsigned int memalign = MEM_ALIGN (memref);
1958 /* ??? Prefer to create garbage instead of creating shared rtl.
1959 This may happen even if offset is non-zero -- consider
1960 (plus (plus reg reg) const_int) -- so do this always. */
1961 addr = copy_rtx (addr);
1963 if (adjust)
1965 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
1966 object, we can merge it into the LO_SUM. */
1967 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
1968 && offset >= 0
1969 && (unsigned HOST_WIDE_INT) offset
1970 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
1971 addr = gen_rtx_LO_SUM (Pmode, XEXP (addr, 0),
1972 plus_constant (XEXP (addr, 1), offset));
1973 else
1974 addr = plus_constant (addr, offset);
1977 new = change_address_1 (memref, mode, addr, validate);
1979 /* Compute the new values of the memory attributes due to this adjustment.
1980 We add the offsets and update the alignment. */
1981 if (memoffset)
1982 memoffset = GEN_INT (offset + INTVAL (memoffset));
1984 /* Compute the new alignment by taking the MIN of the alignment and the
1985 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
1986 is zero. */
1987 if (offset != 0)
1988 memalign
1989 = MIN (memalign,
1990 (unsigned HOST_WIDE_INT) (offset & -offset) * BITS_PER_UNIT);
1992 /* We can compute the size in a number of ways. */
1993 if (GET_MODE (new) != BLKmode)
1994 size = GEN_INT (GET_MODE_SIZE (GET_MODE (new)));
1995 else if (MEM_SIZE (memref))
1996 size = plus_constant (MEM_SIZE (memref), -offset);
1998 MEM_ATTRS (new) = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref),
1999 memoffset, size, memalign, GET_MODE (new));
2001 /* At some point, we should validate that this offset is within the object,
2002 if all the appropriate values are known. */
2003 return new;
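
/* Sketch (illustrative only, kept out of the build): most callers reach
   this function through the adjust_address macro in expr.h, which passes
   VALIDATE = ADJUST = 1.  The hypothetical helper below extracts the
   second word of a DImode MEM; the offset is folded into MEM_OFFSET and
   the recorded alignment is reduced as described above.  */
#if 0
static rtx
example_second_word (mem)
     rtx mem;                       /* A DImode memory reference.  */
{
  return adjust_address (mem, SImode, UNITS_PER_WORD);
}
#endif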
2006 /* Return a memory reference like MEMREF, but with its mode changed
2007 to MODE and its address changed to ADDR, which is assumed to be
2008 MEMREF offset by OFFSET bytes. If VALIDATE is
2009 nonzero, the memory address is forced to be valid. */
2012 adjust_automodify_address_1 (memref, mode, addr, offset, validate)
2013 rtx memref;
2014 enum machine_mode mode;
2015 rtx addr;
2016 HOST_WIDE_INT offset;
2017 int validate;
2019 memref = change_address_1 (memref, VOIDmode, addr, validate);
2020 return adjust_address_1 (memref, mode, offset, validate, 0);
2023 /* Return a memory reference like MEMREF, but whose address is changed by
2024 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2025 known to be in OFFSET (possibly 1). */
2028 offset_address (memref, offset, pow2)
2029 rtx memref;
2030 rtx offset;
2031 HOST_WIDE_INT pow2;
2033 rtx new, addr = XEXP (memref, 0);
2035 new = simplify_gen_binary (PLUS, Pmode, addr, offset);
2037 /* At this point we don't know _why_ the address is invalid. It
2038 could have secondary memory references, multiplies or anything.
2040 However, if we did go and rearrange things, we can wind up not
2041 being able to recognize the magic around pic_offset_table_rtx.
2042 This stuff is fragile, and is yet another example of why it is
2043 bad to expose PIC machinery too early. */
2044 if (! memory_address_p (GET_MODE (memref), new)
2045 && GET_CODE (addr) == PLUS
2046 && XEXP (addr, 0) == pic_offset_table_rtx)
2048 addr = force_reg (GET_MODE (addr), addr);
2049 new = simplify_gen_binary (PLUS, Pmode, addr, offset);
2052 update_temp_slot_address (XEXP (memref, 0), new);
2053 new = change_address_1 (memref, VOIDmode, new, 1);
2055 /* Update the alignment to reflect the offset. Reset the offset, which
2056 we don't know. */
2057 MEM_ATTRS (new)
2058 = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref), 0, 0,
2059 MIN (MEM_ALIGN (memref),
2060 (unsigned HOST_WIDE_INT) pow2 * BITS_PER_UNIT),
2061 GET_MODE (new));
2062 return new;
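
/* Sketch (illustrative only, kept out of the build): offset_address is
   for run-time displacements.  Indexing an array of 4-byte elements
   passes 4 as POW2 so the result keeps 32-bit alignment; ARRAY_MEM and
   IDX_REG are hypothetical operands.  */
#if 0
static rtx
example_index_word_array (array_mem, idx_reg)
     rtx array_mem, idx_reg;
{
  rtx byte_off = gen_rtx_MULT (Pmode, idx_reg, GEN_INT (4));
  return offset_address (array_mem, byte_off, 4);
}
#endif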
2065 /* Return a memory reference like MEMREF, but with its address changed to
2066 ADDR. The caller is asserting that the actual piece of memory pointed
2067 to is the same, just the form of the address is being changed, such as
2068 by putting something into a register. */
2071 replace_equiv_address (memref, addr)
2072 rtx memref;
2073 rtx addr;
2075 /* change_address_1 copies the memory attribute structure without change
2076 and that's exactly what we want here. */
2077 update_temp_slot_address (XEXP (memref, 0), addr);
2078 return change_address_1 (memref, VOIDmode, addr, 1);
2081 /* Likewise, but the reference is not required to be valid. */
2084 replace_equiv_address_nv (memref, addr)
2085 rtx memref;
2086 rtx addr;
2088 return change_address_1 (memref, VOIDmode, addr, 0);
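
/* Sketch (illustrative only, kept out of the build): the typical use is
   copying an address into a register without disturbing the memory
   attributes, since the same memory is still being referenced.  */
#if 0
  mem = replace_equiv_address (mem, force_reg (Pmode, XEXP (mem, 0)));
#endif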
2091 /* Return a memory reference like MEMREF, but with its mode widened to
2092 MODE and offset by OFFSET. This would be used by targets that e.g.
2093 cannot issue QImode memory operations and have to use SImode memory
2094 operations plus masking logic. */
2097 widen_memory_access (memref, mode, offset)
2098 rtx memref;
2099 enum machine_mode mode;
2100 HOST_WIDE_INT offset;
2102 rtx new = adjust_address_1 (memref, mode, offset, 1, 1);
2103 tree expr = MEM_EXPR (new);
2104 rtx memoffset = MEM_OFFSET (new);
2105 unsigned int size = GET_MODE_SIZE (mode);
2107 /* If we don't know what offset we were at within the expression, then
2108 we can't know if we've overstepped the bounds. */
2109 if (! memoffset)
2110 expr = NULL_TREE;
2112 while (expr)
2114 if (TREE_CODE (expr) == COMPONENT_REF)
2116 tree field = TREE_OPERAND (expr, 1);
2118 if (! DECL_SIZE_UNIT (field))
2120 expr = NULL_TREE;
2121 break;
2124 /* Is the field at least as large as the access? If so, ok,
2125 otherwise strip back to the containing structure. */
2126 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2127 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
2128 && INTVAL (memoffset) >= 0)
2129 break;
2131 if (! host_integerp (DECL_FIELD_OFFSET (field), 1))
2133 expr = NULL_TREE;
2134 break;
2137 expr = TREE_OPERAND (expr, 0);
2138 memoffset = (GEN_INT (INTVAL (memoffset)
2139 + tree_low_cst (DECL_FIELD_OFFSET (field), 1)
2140 + (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
2141 / BITS_PER_UNIT)));
2143 /* Similarly for the decl. */
2144 else if (DECL_P (expr)
2145 && DECL_SIZE_UNIT (expr)
2146 && TREE_CODE (DECL_SIZE_UNIT (expr)) == INTEGER_CST
2147 && compare_tree_int (DECL_SIZE_UNIT (expr), size) >= 0
2148 && (! memoffset || INTVAL (memoffset) >= 0))
2149 break;
2150 else
2152 /* The widened memory access overflows the expression, which means
2153 that it could alias another expression. Zap it. */
2154 expr = NULL_TREE;
2155 break;
2159 if (! expr)
2160 memoffset = NULL_RTX;
2162 /* The widened memory may alias other stuff, so zap the alias set. */
2163 /* ??? Maybe use get_alias_set on any remaining expression. */
2165 MEM_ATTRS (new) = get_mem_attrs (0, expr, memoffset, GEN_INT (size),
2166 MEM_ALIGN (new), mode);
2168 return new;
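
/* Sketch (illustrative only, kept out of the build): a target that can
   only load SImode would widen a QImode reference like this and mask
   out the byte afterwards.  Note that the result's alias set is 0, so
   it conflicts with everything.  */
#if 0
static rtx
example_widen_byte_load (byte_mem)
     rtx byte_mem;                  /* A QImode memory reference.  */
{
  return widen_memory_access (byte_mem, SImode, 0);
}
#endif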
2171 /* Return a newly created CODE_LABEL rtx with a unique label number. */
2174 gen_label_rtx ()
2176 rtx label;
2178 label = gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX,
2179 NULL_RTX, label_num++, NULL, NULL);
2181 LABEL_NUSES (label) = 0;
2182 LABEL_ALTERNATE_NAME (label) = NULL;
2183 return label;
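
/* Sketch (illustrative only, kept out of the build) of the usual
   emit-time protocol: create the label first, branch to it, and place
   it later.  emit_jump is assumed to be the stmt.c helper that also
   emits the barrier; JUMP_LABEL and LABEL_NUSES are filled in by jump
   optimization.  */
#if 0
  rtx label = gen_label_rtx ();
  emit_jump (label);
  /* ... insns that the jump skips ...  */
  emit_label (label);
#endif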
2186 /* For procedure integration. */
2188 /* Install new pointers to the first and last insns in the chain.
2189 Also, set cur_insn_uid to one higher than the last in use.
2190 Used for an inline-procedure after copying the insn chain. */
2192 void
2193 set_new_first_and_last_insn (first, last)
2194 rtx first, last;
2196 rtx insn;
2198 first_insn = first;
2199 last_insn = last;
2200 cur_insn_uid = 0;
2202 for (insn = first; insn; insn = NEXT_INSN (insn))
2203 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2205 cur_insn_uid++;
2208 /* Set the range of label numbers found in the current function.
2209 This is used when belatedly compiling an inline function. */
2211 void
2212 set_new_first_and_last_label_num (first, last)
2213 int first, last;
2215 base_label_num = label_num;
2216 first_label_num = first;
2217 last_label_num = last;
2220 /* Set the last label number found in the current function.
2221 This is used when belatedly compiling an inline function. */
2223 void
2224 set_new_last_label_num (last)
2225 int last;
2227 base_label_num = label_num;
2228 last_label_num = last;
2231 /* Restore all variables describing the current status from the structure *P.
2232 This is used after a nested function. */
2234 void
2235 restore_emit_status (p)
2236 struct function *p ATTRIBUTE_UNUSED;
2238 last_label_num = 0;
2241 /* Clear out all parts of the state in F that can safely be discarded
2242 after the function has been compiled, to let garbage collection
2243 reclaim the memory. */
2245 void
2246 free_emit_status (f)
2247 struct function *f;
2249 free (f->emit->x_regno_reg_rtx);
2250 free (f->emit->regno_pointer_align);
2251 free (f->emit->regno_decl);
2252 free (f->emit);
2253 f->emit = NULL;
2256 /* Go through all the RTL insn bodies and copy any invalid shared
2257 structure. This routine should only be called once. */
2259 void
2260 unshare_all_rtl (fndecl, insn)
2261 tree fndecl;
2262 rtx insn;
2264 tree decl;
2266 /* Make sure that virtual parameters are not shared. */
2267 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
2268 SET_DECL_RTL (decl, copy_rtx_if_shared (DECL_RTL (decl)));
2270 /* Make sure that virtual stack slots are not shared. */
2271 unshare_all_decls (DECL_INITIAL (fndecl));
2273 /* Unshare just about everything else. */
2274 unshare_all_rtl_1 (insn);
2276 /* Make sure the addresses of stack slots found outside the insn chain
2277 (such as, in DECL_RTL of a variable) are not shared
2278 with the insn chain.
2280 This special care is necessary when the stack slot MEM does not
2281 actually appear in the insn chain. If it does appear, its address
2282 is unshared from all else at that point. */
2283 stack_slot_list = copy_rtx_if_shared (stack_slot_list);
2286 /* Go through all the RTL insn bodies and copy any invalid shared
2287 structure, again. This is a fairly expensive thing to do so it
2288 should be done sparingly. */
2290 void
2291 unshare_all_rtl_again (insn)
2292 rtx insn;
2294 rtx p;
2295 tree decl;
2297 for (p = insn; p; p = NEXT_INSN (p))
2298 if (INSN_P (p))
2300 reset_used_flags (PATTERN (p));
2301 reset_used_flags (REG_NOTES (p));
2302 reset_used_flags (LOG_LINKS (p));
2305 /* Make sure that virtual stack slots are not shared. */
2306 reset_used_decls (DECL_INITIAL (cfun->decl));
2308 /* Make sure that virtual parameters are not shared. */
2309 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = TREE_CHAIN (decl))
2310 reset_used_flags (DECL_RTL (decl));
2312 reset_used_flags (stack_slot_list);
2314 unshare_all_rtl (cfun->decl, insn);
2317 /* Go through all the RTL insn bodies and copy any invalid shared structure.
2318 Assumes the mark bits are cleared at entry. */
2320 static void
2321 unshare_all_rtl_1 (insn)
2322 rtx insn;
2324 for (; insn; insn = NEXT_INSN (insn))
2325 if (INSN_P (insn))
2327 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2328 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2329 LOG_LINKS (insn) = copy_rtx_if_shared (LOG_LINKS (insn));
2333 /* Go through all virtual stack slots of a function and copy any
2334 shared structure. */
2335 static void
2336 unshare_all_decls (blk)
2337 tree blk;
2339 tree t;
2341 /* Copy shared decls. */
2342 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
2343 if (DECL_RTL_SET_P (t))
2344 SET_DECL_RTL (t, copy_rtx_if_shared (DECL_RTL (t)));
2346 /* Now process sub-blocks. */
2347 for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
2348 unshare_all_decls (t);
2351 /* Go through all virtual stack slots of a function and mark them as
2352 not shared. */
2353 static void
2354 reset_used_decls (blk)
2355 tree blk;
2357 tree t;
2359 /* Mark decls. */
2360 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
2361 if (DECL_RTL_SET_P (t))
2362 reset_used_flags (DECL_RTL (t));
2364 /* Now process sub-blocks. */
2365 for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
2366 reset_used_decls (t);
2369 /* Similar to `copy_rtx' except that if MAY_SHARE is present, it is
2370 placed in the result directly, rather than being copied. MAY_SHARE is
2371 either a MEM or an EXPR_LIST of MEMs. */
2374 copy_most_rtx (orig, may_share)
2375 rtx orig;
2376 rtx may_share;
2378 rtx copy;
2379 int i, j;
2380 RTX_CODE code;
2381 const char *format_ptr;
2383 if (orig == may_share
2384 || (GET_CODE (may_share) == EXPR_LIST
2385 && in_expr_list_p (may_share, orig)))
2386 return orig;
2388 code = GET_CODE (orig);
2390 switch (code)
2392 case REG:
2393 case QUEUED:
2394 case CONST_INT:
2395 case CONST_DOUBLE:
2396 case CONST_VECTOR:
2397 case SYMBOL_REF:
2398 case CODE_LABEL:
2399 case PC:
2400 case CC0:
2401 return orig;
2402 default:
2403 break;
2406 copy = rtx_alloc (code);
2407 PUT_MODE (copy, GET_MODE (orig));
2408 RTX_FLAG (copy, in_struct) = RTX_FLAG (orig, in_struct);
2409 RTX_FLAG (copy, volatil) = RTX_FLAG (orig, volatil);
2410 RTX_FLAG (copy, unchanging) = RTX_FLAG (orig, unchanging);
2411 RTX_FLAG (copy, integrated) = RTX_FLAG (orig, integrated);
2412 RTX_FLAG (copy, frame_related) = RTX_FLAG (orig, frame_related);
2414 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
2416 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
2418 switch (*format_ptr++)
2420 case 'e':
2421 XEXP (copy, i) = XEXP (orig, i);
2422 if (XEXP (orig, i) != NULL && XEXP (orig, i) != may_share)
2423 XEXP (copy, i) = copy_most_rtx (XEXP (orig, i), may_share);
2424 break;
2426 case 'u':
2427 XEXP (copy, i) = XEXP (orig, i);
2428 break;
2430 case 'E':
2431 case 'V':
2432 XVEC (copy, i) = XVEC (orig, i);
2433 if (XVEC (orig, i) != NULL)
2435 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
2436 for (j = 0; j < XVECLEN (copy, i); j++)
2437 XVECEXP (copy, i, j)
2438 = copy_most_rtx (XVECEXP (orig, i, j), may_share);
2440 break;
2442 case 'w':
2443 XWINT (copy, i) = XWINT (orig, i);
2444 break;
2446 case 'n':
2447 case 'i':
2448 XINT (copy, i) = XINT (orig, i);
2449 break;
2451 case 't':
2452 XTREE (copy, i) = XTREE (orig, i);
2453 break;
2455 case 's':
2456 case 'S':
2457 XSTR (copy, i) = XSTR (orig, i);
2458 break;
2460 case '0':
2461 /* Copy this through the wide int field; that's safest. */
2462 X0WINT (copy, i) = X0WINT (orig, i);
2463 break;
2465 default:
2466 abort ();
2469 return copy;
2472 /* Mark ORIG as in use, and return a copy of it if it was already in use.
2473 Recursively does the same for subexpressions. */
2476 copy_rtx_if_shared (orig)
2477 rtx orig;
2479 rtx x = orig;
2480 int i;
2481 enum rtx_code code;
2482 const char *format_ptr;
2483 int copied = 0;
2485 if (x == 0)
2486 return 0;
2488 code = GET_CODE (x);
2490 /* These types may be freely shared. */
2492 switch (code)
2494 case REG:
2495 case QUEUED:
2496 case CONST_INT:
2497 case CONST_DOUBLE:
2498 case CONST_VECTOR:
2499 case SYMBOL_REF:
2500 case CODE_LABEL:
2501 case PC:
2502 case CC0:
2503 case SCRATCH:
2504 /* SCRATCHes must be shared because each one represents a distinct value. */
2505 return x;
2507 case CONST:
2508 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
2509 a LABEL_REF, it isn't sharable. */
2510 if (GET_CODE (XEXP (x, 0)) == PLUS
2511 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
2512 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
2513 return x;
2514 break;
2516 case INSN:
2517 case JUMP_INSN:
2518 case CALL_INSN:
2519 case NOTE:
2520 case BARRIER:
2521 /* The chain of insns is not being copied. */
2522 return x;
2524 case MEM:
2525 /* A MEM is allowed to be shared if its address is constant.
2527 We used to allow sharing of MEMs which referenced
2528 virtual_stack_vars_rtx or virtual_incoming_args_rtx, but
2529 that can lose. instantiate_virtual_regs will not unshare
2530 the MEMs, and combine may change the structure of the address
2531 because it looks safe and profitable in one context, but
2532 in some other context it creates unrecognizable RTL. */
2533 if (CONSTANT_ADDRESS_P (XEXP (x, 0)))
2534 return x;
2536 break;
2538 default:
2539 break;
2542 /* This rtx may not be shared. If it has already been seen,
2543 replace it with a copy of itself. */
2545 if (RTX_FLAG (x, used))
2547 rtx copy;
2549 copy = rtx_alloc (code);
2550 memcpy (copy, x,
2551 (sizeof (*copy) - sizeof (copy->fld)
2552 + sizeof (copy->fld[0]) * GET_RTX_LENGTH (code)));
2553 x = copy;
2554 copied = 1;
2556 RTX_FLAG (x, used) = 1;
2558 /* Now scan the subexpressions recursively.
2559 We can store any replaced subexpressions directly into X
2560 since we know X is not shared! Any vectors in X
2561 must be copied if X was copied. */
2563 format_ptr = GET_RTX_FORMAT (code);
2565 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2567 switch (*format_ptr++)
2569 case 'e':
2570 XEXP (x, i) = copy_rtx_if_shared (XEXP (x, i));
2571 break;
2573 case 'E':
2574 if (XVEC (x, i) != NULL)
2576 int j;
2577 int len = XVECLEN (x, i);
2579 if (copied && len > 0)
2580 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
2581 for (j = 0; j < len; j++)
2582 XVECEXP (x, i, j) = copy_rtx_if_shared (XVECEXP (x, i, j));
2584 break;
2587 return x;
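
/* A sketch (illustrative only, kept out of the build) of the
   mark-and-copy protocol this function assumes: clear the `used' bits
   over the whole region first, then walk it again so that anything
   reached a second time gets copied.  This mirrors what
   unshare_all_rtl_again and unshare_all_rtl_1 do above.  */
#if 0
  reset_used_flags (PATTERN (insn));
  reset_used_flags (REG_NOTES (insn));
  PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
  REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
#endif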
2590 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
2591 to look for shared sub-parts. */
2593 void
2594 reset_used_flags (x)
2595 rtx x;
2597 int i, j;
2598 enum rtx_code code;
2599 const char *format_ptr;
2601 if (x == 0)
2602 return;
2604 code = GET_CODE (x);
2606 /* These types may be freely shared so we needn't do any resetting
2607 for them. */
2609 switch (code)
2611 case REG:
2612 case QUEUED:
2613 case CONST_INT:
2614 case CONST_DOUBLE:
2615 case CONST_VECTOR:
2616 case SYMBOL_REF:
2617 case CODE_LABEL:
2618 case PC:
2619 case CC0:
2620 return;
2622 case INSN:
2623 case JUMP_INSN:
2624 case CALL_INSN:
2625 case NOTE:
2626 case LABEL_REF:
2627 case BARRIER:
2628 /* The chain of insns is not being copied. */
2629 return;
2631 default:
2632 break;
2635 RTX_FLAG (x, used) = 0;
2637 format_ptr = GET_RTX_FORMAT (code);
2638 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2640 switch (*format_ptr++)
2642 case 'e':
2643 reset_used_flags (XEXP (x, i));
2644 break;
2646 case 'E':
2647 for (j = 0; j < XVECLEN (x, i); j++)
2648 reset_used_flags (XVECEXP (x, i, j));
2649 break;
2654 /* Copy X if necessary so that it won't be altered by changes in OTHER.
2655 Return X or the rtx for the pseudo reg the value of X was copied into.
2656 OTHER must be valid as a SET_DEST. */
2659 make_safe_from (x, other)
2660 rtx x, other;
2662 while (1)
2663 switch (GET_CODE (other))
2665 case SUBREG:
2666 other = SUBREG_REG (other);
2667 break;
2668 case STRICT_LOW_PART:
2669 case SIGN_EXTEND:
2670 case ZERO_EXTEND:
2671 other = XEXP (other, 0);
2672 break;
2673 default:
2674 goto done;
2676 done:
2677 if ((GET_CODE (other) == MEM
2678 && ! CONSTANT_P (x)
2679 && GET_CODE (x) != REG
2680 && GET_CODE (x) != SUBREG)
2681 || (GET_CODE (other) == REG
2682 && (REGNO (other) < FIRST_PSEUDO_REGISTER
2683 || reg_mentioned_p (other, x))))
2685 rtx temp = gen_reg_rtx (GET_MODE (x));
2686 emit_move_insn (temp, x);
2687 return temp;
2689 return x;
2692 /* Emission of insns (adding them to the doubly-linked list). */
2694 /* Return the first insn of the current sequence or current function. */
2697 get_insns ()
2699 return first_insn;
2702 /* Specify a new insn as the first in the chain. */
2704 void
2705 set_first_insn (insn)
2706 rtx insn;
2708 if (PREV_INSN (insn) != 0)
2709 abort ();
2710 first_insn = insn;
2713 /* Return the last insn emitted in current sequence or current function. */
2716 get_last_insn ()
2718 return last_insn;
2721 /* Specify a new insn as the last in the chain. */
2723 void
2724 set_last_insn (insn)
2725 rtx insn;
2727 if (NEXT_INSN (insn) != 0)
2728 abort ();
2729 last_insn = insn;
2732 /* Return the last insn emitted, even if it is in a sequence now pushed. */
2735 get_last_insn_anywhere ()
2737 struct sequence_stack *stack;
2738 if (last_insn)
2739 return last_insn;
2740 for (stack = seq_stack; stack; stack = stack->next)
2741 if (stack->last != 0)
2742 return stack->last;
2743 return 0;
2746 /* Return a number larger than any instruction's uid in this function. */
2749 get_max_uid ()
2751 return cur_insn_uid;
2754 /* Renumber instructions so that no instruction UIDs are wasted. */
2756 void
2757 renumber_insns (stream)
2758 FILE *stream;
2760 rtx insn;
2762 /* If we're not supposed to renumber instructions, don't. */
2763 if (!flag_renumber_insns)
2764 return;
2766 /* If there aren't that many instructions, then it's not really
2767 worth renumbering them. */
2768 if (flag_renumber_insns == 1 && get_max_uid () < 25000)
2769 return;
2771 cur_insn_uid = 1;
2773 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2775 if (stream)
2776 fprintf (stream, "Renumbering insn %d to %d\n",
2777 INSN_UID (insn), cur_insn_uid);
2778 INSN_UID (insn) = cur_insn_uid++;
2782 /* Return the next insn. If it is a SEQUENCE, return the first insn
2783 of the sequence. */
2786 next_insn (insn)
2787 rtx insn;
2789 if (insn)
2791 insn = NEXT_INSN (insn);
2792 if (insn && GET_CODE (insn) == INSN
2793 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2794 insn = XVECEXP (PATTERN (insn), 0, 0);
2797 return insn;
2800 /* Return the previous insn. If it is a SEQUENCE, return the last insn
2801 of the sequence. */
2804 previous_insn (insn)
2805 rtx insn;
2807 if (insn)
2809 insn = PREV_INSN (insn);
2810 if (insn && GET_CODE (insn) == INSN
2811 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2812 insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
2815 return insn;
2818 /* Return the next insn after INSN that is not a NOTE. This routine does not
2819 look inside SEQUENCEs. */
2822 next_nonnote_insn (insn)
2823 rtx insn;
2825 while (insn)
2827 insn = NEXT_INSN (insn);
2828 if (insn == 0 || GET_CODE (insn) != NOTE)
2829 break;
2832 return insn;
2835 /* Return the previous insn before INSN that is not a NOTE. This routine does
2836 not look inside SEQUENCEs. */
2839 prev_nonnote_insn (insn)
2840 rtx insn;
2842 while (insn)
2844 insn = PREV_INSN (insn);
2845 if (insn == 0 || GET_CODE (insn) != NOTE)
2846 break;
2849 return insn;
2852 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
2853 or 0, if there is none. This routine does not look inside
2854 SEQUENCEs. */
2857 next_real_insn (insn)
2858 rtx insn;
2860 while (insn)
2862 insn = NEXT_INSN (insn);
2863 if (insn == 0 || GET_CODE (insn) == INSN
2864 || GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == JUMP_INSN)
2865 break;
2868 return insn;
2871 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
2872 or 0, if there is none. This routine does not look inside
2873 SEQUENCEs. */
2876 prev_real_insn (insn)
2877 rtx insn;
2879 while (insn)
2881 insn = PREV_INSN (insn);
2882 if (insn == 0 || GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
2883 || GET_CODE (insn) == JUMP_INSN)
2884 break;
2887 return insn;
2890 /* Find the next insn after INSN that really does something. This routine
2891 does not look inside SEQUENCEs. Until reload has completed, this is the
2892 same as next_real_insn. */
2895 active_insn_p (insn)
2896 rtx insn;
2898 return (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == JUMP_INSN
2899 || (GET_CODE (insn) == INSN
2900 && (! reload_completed
2901 || (GET_CODE (PATTERN (insn)) != USE
2902 && GET_CODE (PATTERN (insn)) != CLOBBER))));
2906 next_active_insn (insn)
2907 rtx insn;
2909 while (insn)
2911 insn = NEXT_INSN (insn);
2912 if (insn == 0 || active_insn_p (insn))
2913 break;
2916 return insn;
2919 /* Find the last insn before INSN that really does something. This routine
2920 does not look inside SEQUENCEs. Until reload has completed, this is the
2921 same as prev_real_insn. */
2924 prev_active_insn (insn)
2925 rtx insn;
2927 while (insn)
2929 insn = PREV_INSN (insn);
2930 if (insn == 0 || active_insn_p (insn))
2931 break;
2934 return insn;
2937 /* Return the next CODE_LABEL after the insn INSN, or 0 if there is none. */
2940 next_label (insn)
2941 rtx insn;
2943 while (insn)
2945 insn = NEXT_INSN (insn);
2946 if (insn == 0 || GET_CODE (insn) == CODE_LABEL)
2947 break;
2950 return insn;
2953 /* Return the last CODE_LABEL before the insn INSN, or 0 if there is none. */
2956 prev_label (insn)
2957 rtx insn;
2959 while (insn)
2961 insn = PREV_INSN (insn);
2962 if (insn == 0 || GET_CODE (insn) == CODE_LABEL)
2963 break;
2966 return insn;
2969 #ifdef HAVE_cc0
2970 /* INSN uses CC0 and is being moved into a delay slot. Set up REG_CC_SETTER
2971 and REG_CC_USER notes so we can find it. */
2973 void
2974 link_cc0_insns (insn)
2975 rtx insn;
2977 rtx user = next_nonnote_insn (insn);
2979 if (GET_CODE (user) == INSN && GET_CODE (PATTERN (user)) == SEQUENCE)
2980 user = XVECEXP (PATTERN (user), 0, 0);
2982 REG_NOTES (user) = gen_rtx_INSN_LIST (REG_CC_SETTER, insn,
2983 REG_NOTES (user));
2984 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_CC_USER, user, REG_NOTES (insn));
2987 /* Return the next insn that uses CC0 after INSN, which is assumed to
2988 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
2989 applied to the result of this function should yield INSN).
2991 Normally, this is simply the next insn. However, if a REG_CC_USER note
2992 is present, it contains the insn that uses CC0.
2994 Return 0 if we can't find the insn. */
2997 next_cc0_user (insn)
2998 rtx insn;
3000 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3002 if (note)
3003 return XEXP (note, 0);
3005 insn = next_nonnote_insn (insn);
3006 if (insn && GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
3007 insn = XVECEXP (PATTERN (insn), 0, 0);
3009 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3010 return insn;
3012 return 0;
3015 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3016 note, it is the previous insn. */
3019 prev_cc0_setter (insn)
3020 rtx insn;
3022 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3024 if (note)
3025 return XEXP (note, 0);
3027 insn = prev_nonnote_insn (insn);
3028 if (! sets_cc0_p (PATTERN (insn)))
3029 abort ();
3031 return insn;
3033 #endif
3035 /* Increment the label uses for all labels present in rtx. */
3037 static void
3038 mark_label_nuses (x)
3039 rtx x;
3041 enum rtx_code code;
3042 int i, j;
3043 const char *fmt;
3045 code = GET_CODE (x);
3046 if (code == LABEL_REF)
3047 LABEL_NUSES (XEXP (x, 0))++;
3049 fmt = GET_RTX_FORMAT (code);
3050 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3052 if (fmt[i] == 'e')
3053 mark_label_nuses (XEXP (x, i));
3054 else if (fmt[i] == 'E')
3055 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3056 mark_label_nuses (XVECEXP (x, i, j));
3061 /* Try splitting insns that can be split for better scheduling.
3062 PAT is the pattern which might split.
3063 TRIAL is the insn providing PAT.
3064 LAST is non-zero if we should return the last insn of the sequence produced.
3066 If this routine succeeds in splitting, it returns the first or last
3067 replacement insn depending on the value of LAST. Otherwise, it
3068 returns TRIAL. If the insn to be returned can be split, it will be. */
3071 try_split (pat, trial, last)
3072 rtx pat, trial;
3073 int last;
3075 rtx before = PREV_INSN (trial);
3076 rtx after = NEXT_INSN (trial);
3077 int has_barrier = 0;
3078 rtx tem;
3079 rtx note, seq;
3080 int probability;
3082 if (any_condjump_p (trial)
3083 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3084 split_branch_probability = INTVAL (XEXP (note, 0));
3085 probability = split_branch_probability;
3087 seq = split_insns (pat, trial);
3089 split_branch_probability = -1;
3091 /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
3092 We may need to handle this specially. */
3093 if (after && GET_CODE (after) == BARRIER)
3095 has_barrier = 1;
3096 after = NEXT_INSN (after);
3099 if (seq)
3101 /* SEQ can either be a SEQUENCE or the pattern of a single insn.
3102 The latter case normally arises only when the split is being done so
3103 that it, in turn, will be split (SFmode on the 29k is an example). */
3104 if (GET_CODE (seq) == SEQUENCE)
3106 int i, njumps = 0;
3108 /* Avoid infinite loop if any insn of the result matches
3109 the original pattern. */
3110 for (i = 0; i < XVECLEN (seq, 0); i++)
3111 if (GET_CODE (XVECEXP (seq, 0, i)) == INSN
3112 && rtx_equal_p (PATTERN (XVECEXP (seq, 0, i)), pat))
3113 return trial;
3115 /* Mark labels. */
3116 for (i = XVECLEN (seq, 0) - 1; i >= 0; i--)
3117 if (GET_CODE (XVECEXP (seq, 0, i)) == JUMP_INSN)
3119 rtx insn = XVECEXP (seq, 0, i);
3120 mark_jump_label (PATTERN (insn),
3121 XVECEXP (seq, 0, i), 0);
3122 njumps++;
3123 if (probability != -1
3124 && any_condjump_p (insn)
3125 && !find_reg_note (insn, REG_BR_PROB, 0))
3127 /* We can preserve the REG_BR_PROB notes only if exactly
3128 one jump is created, otherwise the machine description
3129 is responsible for this step using
3130 split_branch_probability variable. */
3131 if (njumps != 1)
3132 abort ();
3133 REG_NOTES (insn)
3134 = gen_rtx_EXPR_LIST (REG_BR_PROB,
3135 GEN_INT (probability),
3136 REG_NOTES (insn));
3140 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3141 in SEQ and copy our CALL_INSN_FUNCTION_USAGE to it. */
3142 if (GET_CODE (trial) == CALL_INSN)
3143 for (i = XVECLEN (seq, 0) - 1; i >= 0; i--)
3144 if (GET_CODE (XVECEXP (seq, 0, i)) == CALL_INSN)
3145 CALL_INSN_FUNCTION_USAGE (XVECEXP (seq, 0, i))
3146 = CALL_INSN_FUNCTION_USAGE (trial);
3148 /* Copy notes, particularly those related to the CFG. */
3149 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3151 switch (REG_NOTE_KIND (note))
3153 case REG_EH_REGION:
3154 for (i = XVECLEN (seq, 0) - 1; i >= 0; i--)
3156 rtx insn = XVECEXP (seq, 0, i);
3157 if (GET_CODE (insn) == CALL_INSN
3158 || (flag_non_call_exceptions
3159 && may_trap_p (PATTERN (insn))))
3160 REG_NOTES (insn)
3161 = gen_rtx_EXPR_LIST (REG_EH_REGION,
3162 XEXP (note, 0),
3163 REG_NOTES (insn));
3165 break;
3167 case REG_NORETURN:
3168 case REG_SETJMP:
3169 case REG_ALWAYS_RETURN:
3170 for (i = XVECLEN (seq, 0) - 1; i >= 0; i--)
3172 rtx insn = XVECEXP (seq, 0, i);
3173 if (GET_CODE (insn) == CALL_INSN)
3174 REG_NOTES (insn)
3175 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3176 XEXP (note, 0),
3177 REG_NOTES (insn));
3179 break;
3181 case REG_NON_LOCAL_GOTO:
3182 for (i = XVECLEN (seq, 0) - 1; i >= 0; i--)
3184 rtx insn = XVECEXP (seq, 0, i);
3185 if (GET_CODE (insn) == JUMP_INSN)
3186 REG_NOTES (insn)
3187 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3188 XEXP (note, 0),
3189 REG_NOTES (insn));
3191 break;
3193 default:
3194 break;
3198 /* If there are LABELS inside the split insns increment the
3199 usage count so we don't delete the label. */
3200 if (GET_CODE (trial) == INSN)
3201 for (i = XVECLEN (seq, 0) - 1; i >= 0; i--)
3202 if (GET_CODE (XVECEXP (seq, 0, i)) == INSN)
3203 mark_label_nuses (PATTERN (XVECEXP (seq, 0, i)));
3205 tem = emit_insn_after (seq, trial);
3207 delete_insn (trial);
3208 if (has_barrier)
3209 emit_barrier_after (tem);
3211 /* Recursively call try_split for each new insn created; by the
3212 time control returns here that insn will be fully split, so
3213 set LAST and continue from the insn after the one returned.
3214 We can't use next_active_insn here since AFTER may be a note.
3215 Ignore deleted insns, which can occur if not optimizing. */
3216 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3217 if (! INSN_DELETED_P (tem) && INSN_P (tem))
3218 tem = try_split (PATTERN (tem), tem, 1);
3220 /* Avoid infinite loop if the result matches the original pattern. */
3221 else if (rtx_equal_p (seq, pat))
3222 return trial;
3223 else
3225 PATTERN (trial) = seq;
3226 INSN_CODE (trial) = -1;
3227 try_split (seq, trial, last);
3230 /* Return either the first or the last insn, depending on which was
3231 requested. */
3232 return last
3233 ? (after ? PREV_INSN (after) : last_insn)
3234 : NEXT_INSN (before);
3237 return trial;
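
/* Sketch (illustrative only, kept out of the build): the usual caller
   splits an insn in place and continues scanning from the last insn of
   the replacement, hence LAST = 1.  */
#if 0
  insn = try_split (PATTERN (insn), insn, 1);
#endif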
3240 /* Make and return an INSN rtx, initializing all its slots.
3241 Store PATTERN in the pattern slots. */
3244 make_insn_raw (pattern)
3245 rtx pattern;
3247 rtx insn;
3249 insn = rtx_alloc (INSN);
3251 INSN_UID (insn) = cur_insn_uid++;
3252 PATTERN (insn) = pattern;
3253 INSN_CODE (insn) = -1;
3254 LOG_LINKS (insn) = NULL;
3255 REG_NOTES (insn) = NULL;
3257 #ifdef ENABLE_RTL_CHECKING
3258 if (insn
3259 && INSN_P (insn)
3260 && (returnjump_p (insn)
3261 || (GET_CODE (insn) == SET
3262 && SET_DEST (insn) == pc_rtx)))
3264 warning ("ICE: emit_insn used where emit_jump_insn needed:\n");
3265 debug_rtx (insn);
3267 #endif
3269 return insn;
3272 /* Like `make_insn' but make a JUMP_INSN instead of an insn. */
3274 static rtx
3275 make_jump_insn_raw (pattern)
3276 rtx pattern;
3278 rtx insn;
3280 insn = rtx_alloc (JUMP_INSN);
3281 INSN_UID (insn) = cur_insn_uid++;
3283 PATTERN (insn) = pattern;
3284 INSN_CODE (insn) = -1;
3285 LOG_LINKS (insn) = NULL;
3286 REG_NOTES (insn) = NULL;
3287 JUMP_LABEL (insn) = NULL;
3289 return insn;
3292 /* Like `make_insn' but make a CALL_INSN instead of an insn. */
3294 static rtx
3295 make_call_insn_raw (pattern)
3296 rtx pattern;
3298 rtx insn;
3300 insn = rtx_alloc (CALL_INSN);
3301 INSN_UID (insn) = cur_insn_uid++;
3303 PATTERN (insn) = pattern;
3304 INSN_CODE (insn) = -1;
3305 LOG_LINKS (insn) = NULL;
3306 REG_NOTES (insn) = NULL;
3307 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
3309 return insn;
3312 /* Add INSN to the end of the doubly-linked list.
3313 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
3315 void
3316 add_insn (insn)
3317 rtx insn;
3319 PREV_INSN (insn) = last_insn;
3320 NEXT_INSN (insn) = 0;
3322 if (NULL != last_insn)
3323 NEXT_INSN (last_insn) = insn;
3325 if (NULL == first_insn)
3326 first_insn = insn;
3328 last_insn = insn;
3331 /* Add INSN into the doubly-linked list after insn AFTER. This and
3332 the next should be the only functions called to insert an insn once
3333 delay slots have been filled since only they know how to update a
3334 SEQUENCE. */
3336 void
3337 add_insn_after (insn, after)
3338 rtx insn, after;
3340 rtx next = NEXT_INSN (after);
3341 basic_block bb;
3343 if (optimize && INSN_DELETED_P (after))
3344 abort ();
3346 NEXT_INSN (insn) = next;
3347 PREV_INSN (insn) = after;
3349 if (next)
3351 PREV_INSN (next) = insn;
3352 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
3353 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
3355 else if (last_insn == after)
3356 last_insn = insn;
3357 else
3359 struct sequence_stack *stack = seq_stack;
3360 /* Scan all pending sequences too. */
3361 for (; stack; stack = stack->next)
3362 if (after == stack->last)
3364 stack->last = insn;
3365 break;
3368 if (stack == 0)
3369 abort ();
3372 if (basic_block_for_insn
3373 && (unsigned int) INSN_UID (after) < basic_block_for_insn->num_elements
3374 && (bb = BLOCK_FOR_INSN (after)))
3376 set_block_for_insn (insn, bb);
3377 if (INSN_P (insn))
3378 bb->flags |= BB_DIRTY;
3379 /* This should not happen, as the first insn in the BB is always
3380 either a NOTE or a LABEL. */
3381 if (bb->end == after
3382 /* Avoid clobbering of structure when creating new BB. */
3383 && GET_CODE (insn) != BARRIER
3384 && (GET_CODE (insn) != NOTE
3385 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
3386 bb->end = insn;
3389 NEXT_INSN (after) = insn;
3390 if (GET_CODE (after) == INSN && GET_CODE (PATTERN (after)) == SEQUENCE)
3392 rtx sequence = PATTERN (after);
3393 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3397 /* Add INSN into the doubly-linked list before insn BEFORE. This and
3398 the previous should be the only functions called to insert an insn once
3399 delay slots have been filled since only they know how to update a
3400 SEQUENCE. */
3402 void
3403 add_insn_before (insn, before)
3404 rtx insn, before;
3406 rtx prev = PREV_INSN (before);
3407 basic_block bb;
3409 if (optimize && INSN_DELETED_P (before))
3410 abort ();
3412 PREV_INSN (insn) = prev;
3413 NEXT_INSN (insn) = before;
3415 if (prev)
3417 NEXT_INSN (prev) = insn;
3418 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
3420 rtx sequence = PATTERN (prev);
3421 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3424 else if (first_insn == before)
3425 first_insn = insn;
3426 else
3428 struct sequence_stack *stack = seq_stack;
3429 /* Scan all pending sequences too. */
3430 for (; stack; stack = stack->next)
3431 if (before == stack->first)
3433 stack->first = insn;
3434 break;
3437 if (stack == 0)
3438 abort ();
3441 if (basic_block_for_insn
3442 && (unsigned int) INSN_UID (before) < basic_block_for_insn->num_elements
3443 && (bb = BLOCK_FOR_INSN (before)))
3445 set_block_for_insn (insn, bb);
3446 if (INSN_P (insn))
3447 bb->flags |= BB_DIRTY;
3448 /* This should not happen, as the first insn in the BB is always
3449 either a NOTE or a LABEL. */
3450 if (bb->head == insn
3451 /* Avoid clobbering of structure when creating new BB. */
3452 && GET_CODE (insn) != BARRIER
3453 && (GET_CODE (insn) != NOTE
3454 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
3455 abort ();
3458 PREV_INSN (before) = insn;
3459 if (GET_CODE (before) == INSN && GET_CODE (PATTERN (before)) == SEQUENCE)
3460 PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
3463 /* Remove an insn from its doubly-linked list. This function knows how
3464 to handle sequences. */
3465 void
3466 remove_insn (insn)
3467 rtx insn;
3469 rtx next = NEXT_INSN (insn);
3470 rtx prev = PREV_INSN (insn);
3471 basic_block bb;
3473 if (prev)
3475 NEXT_INSN (prev) = next;
3476 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
3478 rtx sequence = PATTERN (prev);
3479 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
3482 else if (first_insn == insn)
3483 first_insn = next;
3484 else
3486 struct sequence_stack *stack = seq_stack;
3487 /* Scan all pending sequences too. */
3488 for (; stack; stack = stack->next)
3489 if (insn == stack->first)
3491 stack->first = next;
3492 break;
3495 if (stack == 0)
3496 abort ();
3499 if (next)
3501 PREV_INSN (next) = prev;
3502 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
3503 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
3505 else if (last_insn == insn)
3506 last_insn = prev;
3507 else
3509 struct sequence_stack *stack = seq_stack;
3510 /* Scan all pending sequences too. */
3511 for (; stack; stack = stack->next)
3512 if (insn == stack->last)
3514 stack->last = prev;
3515 break;
3518 if (stack == 0)
3519 abort ();
3521 if (basic_block_for_insn
3522 && (unsigned int) INSN_UID (insn) < basic_block_for_insn->num_elements
3523 && (bb = BLOCK_FOR_INSN (insn)))
3525 if (INSN_P (insn))
3526 bb->flags |= BB_DIRTY;
3527 if (bb->head == insn)
3529 /* Never ever delete the basic block note without deleting whole
3530 basic block. */
3531 if (GET_CODE (insn) == NOTE)
3532 abort ();
3533 bb->head = next;
3535 if (bb->end == insn)
3536 bb->end = prev;
3540 /* Delete all insns made since FROM.
3541 FROM becomes the new last instruction. */
3543 void
3544 delete_insns_since (from)
3545 rtx from;
3547 if (from == 0)
3548 first_insn = 0;
3549 else
3550 NEXT_INSN (from) = 0;
3551 last_insn = from;
3554 /* This function is deprecated; please use sequences instead.
3556 Move a consecutive bunch of insns to a different place in the chain.
3557 The insns to be moved are those between FROM and TO.
3558 They are moved to a new position after the insn AFTER.
3559 AFTER must not be FROM or TO or any insn in between.
3561 This function does not know about SEQUENCEs and hence should not be
3562 called after delay-slot filling has been done. */
3564 void
3565 reorder_insns_nobb (from, to, after)
3566 rtx from, to, after;
3568 /* Splice this bunch out of where it is now. */
3569 if (PREV_INSN (from))
3570 NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
3571 if (NEXT_INSN (to))
3572 PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
3573 if (last_insn == to)
3574 last_insn = PREV_INSN (from);
3575 if (first_insn == from)
3576 first_insn = NEXT_INSN (to);
3578 /* Make the new neighbors point to it and it to them. */
3579 if (NEXT_INSN (after))
3580 PREV_INSN (NEXT_INSN (after)) = to;
3582 NEXT_INSN (to) = NEXT_INSN (after);
3583 PREV_INSN (from) = after;
3584 NEXT_INSN (after) = from;
3585 if (after == last_insn)
3586 last_insn = to;
3589 /* Same as function above, but take care to update BB boundaries. */
3590 void
3591 reorder_insns (from, to, after)
3592 rtx from, to, after;
3594 rtx prev = PREV_INSN (from);
3595 basic_block bb, bb2;
3597 reorder_insns_nobb (from, to, after);
3599 if (basic_block_for_insn
3600 && (unsigned int) INSN_UID (after) < basic_block_for_insn->num_elements
3601 && (bb = BLOCK_FOR_INSN (after)))
3603 rtx x;
3604 bb->flags |= BB_DIRTY;
3606 if (basic_block_for_insn
3607 && ((unsigned int) INSN_UID (from)
3608 < basic_block_for_insn->num_elements)
3609 && (bb2 = BLOCK_FOR_INSN (from)))
3611 if (bb2->end == to)
3612 bb2->end = prev;
3613 bb2->flags |= BB_DIRTY;
3616 if (bb->end == after)
3617 bb->end = to;
3619 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
3620 set_block_for_insn (x, bb);
3624 /* Return the line note insn preceding INSN. */
3626 static rtx
3627 find_line_note (insn)
3628 rtx insn;
3630 if (no_line_numbers)
3631 return 0;
3633 for (; insn; insn = PREV_INSN (insn))
3634 if (GET_CODE (insn) == NOTE
3635 && NOTE_LINE_NUMBER (insn) >= 0)
3636 break;
3638 return insn;
3641 /* Like reorder_insns, but inserts line notes to preserve the line numbers
3642 of the moved insns when debugging. This may insert a note between AFTER
3643 and FROM, and another one after TO. */
3645 void
3646 reorder_insns_with_line_notes (from, to, after)
3647 rtx from, to, after;
3649 rtx from_line = find_line_note (from);
3650 rtx after_line = find_line_note (after);
3652 reorder_insns (from, to, after);
3654 if (from_line == after_line)
3655 return;
3657 if (from_line)
3658 emit_line_note_after (NOTE_SOURCE_FILE (from_line),
3659 NOTE_LINE_NUMBER (from_line),
3660 after);
3661 if (after_line)
3662 emit_line_note_after (NOTE_SOURCE_FILE (after_line),
3663 NOTE_LINE_NUMBER (after_line),
3664 to);
3667 /* Remove unnecessary notes from the instruction stream. */
3669 void
3670 remove_unnecessary_notes ()
3672 rtx block_stack = NULL_RTX;
3673 rtx eh_stack = NULL_RTX;
3674 rtx insn;
3675 rtx next;
3676 rtx tmp;
3678 /* We must not remove the first instruction in the function because
3679 the compiler depends on the first instruction being a note. */
3680 for (insn = NEXT_INSN (get_insns ()); insn; insn = next)
3682 /* Remember what's next. */
3683 next = NEXT_INSN (insn);
3685 /* We're only interested in notes. */
3686 if (GET_CODE (insn) != NOTE)
3687 continue;
3689 switch (NOTE_LINE_NUMBER (insn))
3691 case NOTE_INSN_DELETED:
3692 case NOTE_INSN_LOOP_END_TOP_COND:
3693 remove_insn (insn);
3694 break;
3696 case NOTE_INSN_EH_REGION_BEG:
3697 eh_stack = alloc_INSN_LIST (insn, eh_stack);
3698 break;
3700 case NOTE_INSN_EH_REGION_END:
3701 /* Too many end notes. */
3702 if (eh_stack == NULL_RTX)
3703 abort ();
3704 /* Mismatched nesting. */
3705 if (NOTE_EH_HANDLER (XEXP (eh_stack, 0)) != NOTE_EH_HANDLER (insn))
3706 abort ();
3707 tmp = eh_stack;
3708 eh_stack = XEXP (eh_stack, 1);
3709 free_INSN_LIST_node (tmp);
3710 break;
3712 case NOTE_INSN_BLOCK_BEG:
3713 /* By now, all notes indicating lexical blocks should have
3714 NOTE_BLOCK filled in. */
3715 if (NOTE_BLOCK (insn) == NULL_TREE)
3716 abort ();
3717 block_stack = alloc_INSN_LIST (insn, block_stack);
3718 break;
3720 case NOTE_INSN_BLOCK_END:
3721 /* Too many end notes. */
3722 if (block_stack == NULL_RTX)
3723 abort ();
3724 /* Mismatched nesting. */
3725 if (NOTE_BLOCK (XEXP (block_stack, 0)) != NOTE_BLOCK (insn))
3726 abort ();
3727 tmp = block_stack;
3728 block_stack = XEXP (block_stack, 1);
3729 free_INSN_LIST_node (tmp);
3731 /* Scan back to see if there are any non-note instructions
3732 between INSN and the beginning of this block. If not,
3733 then there is no PC range in the generated code that will
3734 actually be in this block, so there's no point in
3735 remembering the existence of the block. */
3736 for (tmp = PREV_INSN (insn); tmp; tmp = PREV_INSN (tmp))
3738 /* This block contains a real instruction. Note that we
3739 don't include labels; if the only thing in the block
3740 is a label, then there are still no PC values that
3741 lie within the block. */
3742 if (INSN_P (tmp))
3743 break;
3745 /* We're only interested in NOTEs. */
3746 if (GET_CODE (tmp) != NOTE)
3747 continue;
3749 if (NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BLOCK_BEG)
3751 /* We just verified that this BLOCK matches us with
3752 the block_stack check above. Never delete the
3753 BLOCK for the outermost scope of the function; we
3754 can refer to names from that scope even if the
3755 block notes are messed up. */
3756 if (! is_body_block (NOTE_BLOCK (insn))
3757 && (*debug_hooks->ignore_block) (NOTE_BLOCK (insn)))
3759 remove_insn (tmp);
3760 remove_insn (insn);
3762 break;
3764 else if (NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BLOCK_END)
3765 /* There's a nested block. We need to leave the
3766 current block in place since otherwise the debugger
3767 wouldn't be able to show symbols from our block in
3768 the nested block. */
3769 break;
3774 /* Too many begin notes. */
3775 if (block_stack || eh_stack)
3776 abort ();
3780 /* Emit an insn of given code and pattern
3781 at a specified place within the doubly-linked list. */
3783 /* Make an instruction with body PATTERN
3784 and output it before the instruction BEFORE. */
3787 emit_insn_before (pattern, before)
3788 rtx pattern, before;
3790 rtx insn = before;
3792 if (GET_CODE (pattern) == SEQUENCE)
3794 int i;
3796 for (i = 0; i < XVECLEN (pattern, 0); i++)
3798 insn = XVECEXP (pattern, 0, i);
3799 add_insn_before (insn, before);
3802 else
3804 insn = make_insn_raw (pattern);
3805 add_insn_before (insn, before);
3808 return insn;
3811 /* Make an instruction with body PATTERN and code JUMP_INSN
3812 and output it before the instruction BEFORE. */
3815 emit_jump_insn_before (pattern, before)
3816 rtx pattern, before;
3818 rtx insn;
3820 if (GET_CODE (pattern) == SEQUENCE)
3821 insn = emit_insn_before (pattern, before);
3822 else
3824 insn = make_jump_insn_raw (pattern);
3825 add_insn_before (insn, before);
3828 return insn;
3831 /* Make an instruction with body PATTERN and code CALL_INSN
3832 and output it before the instruction BEFORE. */
3835 emit_call_insn_before (pattern, before)
3836 rtx pattern, before;
3838 rtx insn;
3840 if (GET_CODE (pattern) == SEQUENCE)
3841 insn = emit_insn_before (pattern, before);
3842 else
3844 insn = make_call_insn_raw (pattern);
3845 add_insn_before (insn, before);
3846 PUT_CODE (insn, CALL_INSN);
3849 return insn;
3852 /* Make an instruction with body PATTERN and code CALL_INSN
3853 and output it after the instruction AFTER. */
3856 emit_call_insn_after (pattern, after)
3857 rtx pattern, after;
3859 rtx insn;
3861 if (GET_CODE (pattern) == SEQUENCE)
3862 insn = emit_insn_after (pattern, after);
3863 else
3865 insn = make_call_insn_raw (pattern);
3866 add_insn_after (insn, after);
3867 PUT_CODE (insn, CALL_INSN);
3870 return insn;
3873 /* Make an insn of code BARRIER
3874 and output it before the insn BEFORE. */
3877 emit_barrier_before (before)
3878 rtx before;
3880 rtx insn = rtx_alloc (BARRIER);
3882 INSN_UID (insn) = cur_insn_uid++;
3884 add_insn_before (insn, before);
3885 return insn;
3888 /* Emit the label LABEL before the insn BEFORE. */
3891 emit_label_before (label, before)
3892 rtx label, before;
3894 /* This can be called twice for the same label as a result of the
3895 confusion that follows a syntax error! So make it harmless. */
3896 if (INSN_UID (label) == 0)
3898 INSN_UID (label) = cur_insn_uid++;
3899 add_insn_before (label, before);
3902 return label;
3905 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
3908 emit_note_before (subtype, before)
3909 int subtype;
3910 rtx before;
3912 rtx note = rtx_alloc (NOTE);
3913 INSN_UID (note) = cur_insn_uid++;
3914 NOTE_SOURCE_FILE (note) = 0;
3915 NOTE_LINE_NUMBER (note) = subtype;
3917 add_insn_before (note, before);
3918 return note;
3921 /* Make an insn of code INSN with body PATTERN
3922 and output it after the insn AFTER. */
3925 emit_insn_after (pattern, after)
3926 rtx pattern, after;
3928 rtx insn = after;
3930 if (GET_CODE (pattern) == SEQUENCE)
3932 int i;
3934 for (i = 0; i < XVECLEN (pattern, 0); i++)
3936 insn = XVECEXP (pattern, 0, i);
3937 add_insn_after (insn, after);
3938 after = insn;
3941 else
3943 insn = make_insn_raw (pattern);
3944 add_insn_after (insn, after);
3947 return insn;
3950 /* Similar to emit_insn_after, except that line notes are to be inserted so
3951 as to act as if this insn were at FROM. */
3953 void
3954 emit_insn_after_with_line_notes (pattern, after, from)
3955 rtx pattern, after, from;
3957 rtx from_line = find_line_note (from);
3958 rtx after_line = find_line_note (after);
3959 rtx insn = emit_insn_after (pattern, after);
3961 if (from_line)
3962 emit_line_note_after (NOTE_SOURCE_FILE (from_line),
3963 NOTE_LINE_NUMBER (from_line),
3964 after);
3966 if (after_line)
3967 emit_line_note_after (NOTE_SOURCE_FILE (after_line),
3968 NOTE_LINE_NUMBER (after_line),
3969 insn);
3972 /* Make an insn of code JUMP_INSN with body PATTERN
3973 and output it after the insn AFTER. */
3976 emit_jump_insn_after (pattern, after)
3977 rtx pattern, after;
3979 rtx insn;
3981 if (GET_CODE (pattern) == SEQUENCE)
3982 insn = emit_insn_after (pattern, after);
3983 else
3985 insn = make_jump_insn_raw (pattern);
3986 add_insn_after (insn, after);
3989 return insn;
3992 /* Make an insn of code BARRIER
3993 and output it after the insn AFTER. */
3996 emit_barrier_after (after)
3997 rtx after;
3999 rtx insn = rtx_alloc (BARRIER);
4001 INSN_UID (insn) = cur_insn_uid++;
4003 add_insn_after (insn, after);
4004 return insn;
4007 /* Emit the label LABEL after the insn AFTER. */
4010 emit_label_after (label, after)
4011 rtx label, after;
4013 /* This can be called twice for the same label
4014 as a result of the confusion that follows a syntax error!
4015 So make it harmless. */
4016 if (INSN_UID (label) == 0)
4018 INSN_UID (label) = cur_insn_uid++;
4019 add_insn_after (label, after);
4022 return label;
4025 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
4028 emit_note_after (subtype, after)
4029 int subtype;
4030 rtx after;
4032 rtx note = rtx_alloc (NOTE);
4033 INSN_UID (note) = cur_insn_uid++;
4034 NOTE_SOURCE_FILE (note) = 0;
4035 NOTE_LINE_NUMBER (note) = subtype;
4036 add_insn_after (note, after);
4037 return note;
4040 /* Emit a line note for FILE and LINE after the insn AFTER. */
4043 emit_line_note_after (file, line, after)
4044 const char *file;
4045 int line;
4046 rtx after;
4048 rtx note;
4050 if (no_line_numbers && line > 0)
4052 cur_insn_uid++;
4053 return 0;
4056 note = rtx_alloc (NOTE);
4057 INSN_UID (note) = cur_insn_uid++;
4058 NOTE_SOURCE_FILE (note) = file;
4059 NOTE_LINE_NUMBER (note) = line;
4060 add_insn_after (note, after);
4061 return note;
4064 /* Make an insn of code INSN with pattern PATTERN
4065 and add it to the end of the doubly-linked list.
4066 If PATTERN is a SEQUENCE, take the elements of it
4067 and emit an insn for each element.
4069 Returns the last insn emitted. */
4072 emit_insn (pattern)
4073 rtx pattern;
4075 rtx insn = last_insn;
4077 if (GET_CODE (pattern) == SEQUENCE)
4079 int i;
4081 for (i = 0; i < XVECLEN (pattern, 0); i++)
4083 insn = XVECEXP (pattern, 0, i);
4084 add_insn (insn);
4087 else
4089 insn = make_insn_raw (pattern);
4090 add_insn (insn);
4093 return insn;
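
/* Sketch (illustrative only, kept out of the build): the commonest
   client is expander code appending a single SET to the chain;
   DEST_REG and SRC_EXP are hypothetical operands.  */
#if 0
  emit_insn (gen_rtx_SET (VOIDmode, dest_reg, src_exp));
#endif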
4096 /* Emit the insns in a chain starting with INSN.
4097 Return the last insn emitted. */
4100 emit_insns (insn)
4101 rtx insn;
4103 rtx last = 0;
4105 while (insn)
4107 rtx next = NEXT_INSN (insn);
4108 add_insn (insn);
4109 last = insn;
4110 insn = next;
4113 return last;
4116 /* Emit the insns in a chain starting with INSN and place them in front of
4117 the insn BEFORE. Return the last insn emitted. */
4120 emit_insns_before (insn, before)
4121 rtx insn;
4122 rtx before;
4124 rtx last = 0;
4126 while (insn)
4128 rtx next = NEXT_INSN (insn);
4129 add_insn_before (insn, before);
4130 last = insn;
4131 insn = next;
4134 return last;
4137 /* Emit the insns in a chain starting with FIRST and place them in back of
4138 the insn AFTER. Return the last insn emitted. */
4141 emit_insns_after (first, after)
4142 rtx first;
4143 rtx after;
4145 rtx last;
4146 rtx after_after;
4147 basic_block bb;
4149 if (!after)
4150 abort ();
4152 if (!first)
4153 return after;
4155 if (basic_block_for_insn
4156 && (unsigned int) INSN_UID (after) < basic_block_for_insn->num_elements
4157 && (bb = BLOCK_FOR_INSN (after)))
4159 bb->flags |= BB_DIRTY;
4160 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4161 set_block_for_insn (last, bb);
4162 set_block_for_insn (last, bb);
4163 if (bb->end == after)
4164 bb->end = last;
4166 else
4167 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4168 continue;
4170 after_after = NEXT_INSN (after);
4172 NEXT_INSN (after) = first;
4173 PREV_INSN (first) = after;
4174 NEXT_INSN (last) = after_after;
4175 if (after_after)
4176 PREV_INSN (after_after) = last;
4178 if (after == last_insn)
4179 last_insn = last;
4180 return last;
4183 /* Make an insn of code JUMP_INSN with pattern PATTERN
4184 and add it to the end of the doubly-linked list. */
4187 emit_jump_insn (pattern)
4188 rtx pattern;
4190 if (GET_CODE (pattern) == SEQUENCE)
4191 return emit_insn (pattern);
4192 else
4194 rtx insn = make_jump_insn_raw (pattern);
4195 add_insn (insn);
4196 return insn;
4200 /* Make an insn of code CALL_INSN with pattern PATTERN
4201 and add it to the end of the doubly-linked list. */
4204 emit_call_insn (pattern)
4205 rtx pattern;
4207 if (GET_CODE (pattern) == SEQUENCE)
4208 return emit_insn (pattern);
4209 else
4211 rtx insn = make_call_insn_raw (pattern);
4212 add_insn (insn);
4213 PUT_CODE (insn, CALL_INSN);
4214 return insn;
4218 /* Add the label LABEL to the end of the doubly-linked list. */
4221 emit_label (label)
4222 rtx label;
4224 /* This can be called twice for the same label
4225 as a result of the confusion that follows a syntax error!
4226 So make it harmless. */
4227 if (INSN_UID (label) == 0)
4229 INSN_UID (label) = cur_insn_uid++;
4230 add_insn (label);
4232 return label;
4235 /* Make an insn of code BARRIER
4236 and add it to the end of the doubly-linked list. */
4239 emit_barrier ()
4241 rtx barrier = rtx_alloc (BARRIER);
4242 INSN_UID (barrier) = cur_insn_uid++;
4243 add_insn (barrier);
4244 return barrier;
4247 /* Make an insn of code NOTE
4248 with data-fields specified by FILE and LINE
4249 and add it to the end of the doubly-linked list,
4250 but only if line-numbers are desired for debugging info. */
4253 emit_line_note (file, line)
4254 const char *file;
4255 int line;
4257 set_file_and_line_for_stmt (file, line);
4259 #if 0
4260 if (no_line_numbers)
4261 return 0;
4262 #endif
4264 return emit_note (file, line);
4267 /* Make an insn of code NOTE
4268 with data-fields specified by FILE and LINE
4269 and add it to the end of the doubly-linked list.
4270 If it is a line-number NOTE, omit it if it matches the previous one. */
4273 emit_note (file, line)
4274 const char *file;
4275 int line;
4277 rtx note;
4279 if (line > 0)
4281 if (file && last_filename && !strcmp (file, last_filename)
4282 && line == last_linenum)
4283 return 0;
4284 last_filename = file;
4285 last_linenum = line;
4288 if (no_line_numbers && line > 0)
4290 cur_insn_uid++;
4291 return 0;
4294 note = rtx_alloc (NOTE);
4295 INSN_UID (note) = cur_insn_uid++;
4296 NOTE_SOURCE_FILE (note) = file;
4297 NOTE_LINE_NUMBER (note) = line;
4298 add_insn (note);
4299 return note;
4302 /* Emit a NOTE, and don't omit it even if LINE is the previous note. */
4305 emit_line_note_force (file, line)
4306 const char *file;
4307 int line;
4309 last_linenum = -1;
4310 return emit_line_note (file, line);
4313 /* Cause next statement to emit a line note even if the line number
4314 has not changed. This is used at the beginning of a function. */
4316 void
4317 force_next_line_note ()
4319 last_linenum = -1;

/* Place a note of KIND on insn INSN with DATUM as the datum.  If a
   note of this type already exists, replace its datum.  */

rtx
set_unique_reg_note (insn, kind, datum)
     rtx insn;
     enum reg_note kind;
     rtx datum;
{
  rtx note = find_reg_note (insn, kind, NULL_RTX);

  switch (kind)
    {
    case REG_EQUAL:
    case REG_EQUIV:
      /* Don't add REG_EQUAL/REG_EQUIV notes if the insn
         has multiple sets (some callers assume single_set
         means the insn only has one set, when in fact it
         means the insn only has one * useful * set).  */
      if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
        {
          if (note)
            abort ();
          return NULL_RTX;
        }

      /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
         It serves no useful purpose and breaks eliminate_regs.  */
      if (GET_CODE (datum) == ASM_OPERANDS)
        return NULL_RTX;
      break;

    default:
      break;
    }

  if (note)
    {
      XEXP (note, 0) = datum;
      return note;
    }

  REG_NOTES (insn) = gen_rtx_EXPR_LIST (kind, datum, REG_NOTES (insn));
  return REG_NOTES (insn);
}
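
/* Usage sketch (illustrative only; `insn', `x' and `y' are assumed to
   exist in the caller): a pass that expands a multiply into cheaper
   insns can record the equivalence on the final insn so that later
   optimizers still see the original value:

     set_unique_reg_note (insn, REG_EQUAL, gen_rtx_MULT (SImode, x, y));

   At most one note of each kind survives, since an existing note has
   its datum overwritten rather than gaining a duplicate.  */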

/* Return an indication of which type of insn should have X as a body.
   The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN.  */

enum rtx_code
classify_insn (x)
     rtx x;
{
  if (GET_CODE (x) == CODE_LABEL)
    return CODE_LABEL;
  if (GET_CODE (x) == CALL)
    return CALL_INSN;
  if (GET_CODE (x) == RETURN)
    return JUMP_INSN;
  if (GET_CODE (x) == SET)
    {
      if (SET_DEST (x) == pc_rtx)
        return JUMP_INSN;
      else if (GET_CODE (SET_SRC (x)) == CALL)
        return CALL_INSN;
      else
        return INSN;
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int j;
      for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
        if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
          return CALL_INSN;
        else if (GET_CODE (XVECEXP (x, 0, j)) == SET
                 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
          return JUMP_INSN;
        else if (GET_CODE (XVECEXP (x, 0, j)) == SET
                 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
          return CALL_INSN;
    }
  return INSN;
}
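
/* Illustrative example (not in the original source): a SET whose
   destination is the program counter classifies as a jump, so with
   `label' a CODE_LABEL made by gen_label_rtx,

     classify_insn (gen_rtx_SET (VOIDmode, pc_rtx,
                                 gen_rtx_LABEL_REF (VOIDmode, label)))

   returns JUMP_INSN, whereas a register-to-register SET returns INSN.  */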

/* Emit the rtl pattern X as an appropriate kind of insn.
   If X is a label, it is simply added into the insn chain.  */

rtx
emit (x)
     rtx x;
{
  enum rtx_code code = classify_insn (x);

  if (code == CODE_LABEL)
    return emit_label (x);
  else if (code == INSN)
    return emit_insn (x);
  else if (code == JUMP_INSN)
    {
      rtx insn = emit_jump_insn (x);
      if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
        return emit_barrier ();
      return insn;
    }
  else if (code == CALL_INSN)
    return emit_call_insn (x);
  else
    abort ();
}

/* Begin emitting insns to a sequence which can be packaged in an
   RTL_EXPR.  If this sequence will contain something that might cause
   the compiler to pop arguments to function calls (because those
   pops have previously been deferred; see INHIBIT_DEFER_POP for more
   details), use do_pending_stack_adjust before calling this function.
   That will ensure that the deferred pops are not accidentally
   emitted in the middle of this sequence.  */

void
start_sequence ()
{
  struct sequence_stack *tem;

  tem = (struct sequence_stack *) xmalloc (sizeof (struct sequence_stack));

  tem->next = seq_stack;
  tem->first = first_insn;
  tem->last = last_insn;
  tem->sequence_rtl_expr = seq_rtl_expr;

  seq_stack = tem;

  first_insn = 0;
  last_insn = 0;
}

/* Similarly, but indicate that this sequence will be placed in T, an
   RTL_EXPR.  See the documentation for start_sequence for more
   information about how to use this function.  */

void
start_sequence_for_rtl_expr (t)
     tree t;
{
  start_sequence ();

  seq_rtl_expr = t;
}

/* Set up the insn chain starting with FIRST as the current sequence,
   saving the previously current one.  See the documentation for
   start_sequence for more information about how to use this function.  */

void
push_to_sequence (first)
     rtx first;
{
  rtx last;

  start_sequence ();

  for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last));

  first_insn = first;
  last_insn = last;
}
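
/* Usage sketch (illustrative only; `fixup_chain', `dest' and `src'
   are assumed): push_to_sequence lets a caller append to a chain that
   was built earlier and kept aside, such as a fixup chain:

     push_to_sequence (fixup_chain);
     emit_insn (gen_move_insn (dest, src));
     fixup_chain = get_insns ();
     end_sequence ();

   get_insns returns the first insn of the currently pushed chain.  */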

/* Set up the insn chain from a chain starting in FIRST to LAST.  */

void
push_to_full_sequence (first, last)
     rtx first, last;
{
  start_sequence ();
  first_insn = first;
  last_insn = last;
  /* We really should have the end of the insn chain here.  */
  if (last && NEXT_INSN (last))
    abort ();
}

/* Set up the outer-level insn chain
   as the current sequence, saving the previously current one.  */

void
push_topmost_sequence ()
{
  struct sequence_stack *stack, *top = NULL;

  start_sequence ();

  for (stack = seq_stack; stack; stack = stack->next)
    top = stack;

  first_insn = top->first;
  last_insn = top->last;
  seq_rtl_expr = top->sequence_rtl_expr;
}

/* After emitting to the outer-level insn chain, update the outer-level
   insn chain, and restore the previous saved state.  */

void
pop_topmost_sequence ()
{
  struct sequence_stack *stack, *top = NULL;

  for (stack = seq_stack; stack; stack = stack->next)
    top = stack;

  top->first = first_insn;
  top->last = last_insn;
  /* ??? Why don't we save seq_rtl_expr here?  */

  end_sequence ();
}

/* After emitting to a sequence, restore previous saved state.

   To get the contents of the sequence just made, you must call
   `gen_sequence' *before* calling here.

   If the compiler might have deferred popping arguments while
   generating this sequence, and this sequence will not be immediately
   inserted into the instruction stream, use do_pending_stack_adjust
   before calling gen_sequence.  That will ensure that the deferred
   pops are inserted into this sequence, and not into some random
   location in the instruction stream.  See INHIBIT_DEFER_POP for more
   information about deferred popping of arguments.  */

void
end_sequence ()
{
  struct sequence_stack *tem = seq_stack;

  first_insn = tem->first;
  last_insn = tem->last;
  seq_rtl_expr = tem->sequence_rtl_expr;
  seq_stack = tem->next;

  free (tem);
}

/* This works like end_sequence, but records the old sequence in FIRST
   and LAST.  */

void
end_full_sequence (first, last)
     rtx *first, *last;
{
  *first = first_insn;
  *last = last_insn;
  end_sequence ();
}

/* Return 1 if currently emitting into a sequence.  */

int
in_sequence_p ()
{
  return seq_stack != 0;
}

/* Generate a SEQUENCE rtx containing the insns already emitted
   to the current sequence.

   This is how the gen_... function from a DEFINE_EXPAND
   constructs the SEQUENCE that it returns.  */

rtx
gen_sequence ()
{
  rtx result;
  rtx tem;
  int i;
  int len;

  /* Count the insns in the chain.  */
  len = 0;
  for (tem = first_insn; tem; tem = NEXT_INSN (tem))
    len++;

  /* If only one insn, return it rather than a SEQUENCE.
     (Now that we cache SEQUENCE expressions, it isn't worth special-casing
     the case of an empty list.)
     We only return the pattern of an insn if its code is INSN and it
     has no notes.  This ensures that no information gets lost.  */
  if (len == 1
      && GET_CODE (first_insn) == INSN
      && ! RTX_FRAME_RELATED_P (first_insn)
      /* Don't throw away any reg notes.  */
      && REG_NOTES (first_insn) == 0)
    return PATTERN (first_insn);

  result = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (len));

  for (i = 0, tem = first_insn; tem; tem = NEXT_INSN (tem), i++)
    XVECEXP (result, 0, i) = tem;

  return result;
}
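
/* Usage sketch (illustrative only; `target', `op0' and `op1' are
   pseudos assumed to come from gen_reg_rtx): the canonical pattern
   for building a detached chain of insns and splicing it in later:

     rtx seq;

     start_sequence ();
     emit_insn (gen_rtx_SET (VOIDmode, target,
                             gen_rtx_PLUS (SImode, op0, op1)));
     seq = gen_sequence ();    ... must precede end_sequence ...
     end_sequence ();
     emit_insn (seq);          ... splice into the enclosing chain ...

   Per the len == 1 shortcut above, gen_sequence may hand back the
   bare PATTERN of a single insn rather than a SEQUENCE rtx.  */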

/* Put the various virtual registers into REGNO_REG_RTX.  */

void
init_virtual_regs (es)
     struct emit_status *es;
{
  rtx *ptr = es->x_regno_reg_rtx;
  ptr[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
  ptr[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
  ptr[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
  ptr[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
  ptr[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
}

/* Used by copy_insn_1 to avoid copying SCRATCHes more than once.  */
static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
static int copy_insn_n_scratches;

/* When an insn is being copied by copy_insn_1, this is nonzero if we have
   copied an ASM_OPERANDS.
   In that case, it is the original input-operand vector.  */
static rtvec orig_asm_operands_vector;

/* When an insn is being copied by copy_insn_1, this is nonzero if we have
   copied an ASM_OPERANDS.
   In that case, it is the copied input-operand vector.  */
static rtvec copy_asm_operands_vector;

/* Likewise for the constraints vector.  */
static rtvec orig_asm_constraints_vector;
static rtvec copy_asm_constraints_vector;

/* Recursively create a new copy of an rtx for copy_insn.
   This function differs from copy_rtx in that it handles SCRATCHes and
   ASM_OPERANDs properly.
   Normally, this function is not used directly; use copy_insn as front end.
   However, you could first copy an insn pattern with copy_insn and then use
   this function afterwards to properly copy any REG_NOTEs containing
   SCRATCHes.  */

rtx
copy_insn_1 (orig)
     rtx orig;
{
  rtx copy;
  int i, j;
  RTX_CODE code;
  const char *format_ptr;

  code = GET_CODE (orig);

  switch (code)
    {
    case REG:
    case QUEUED:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case ADDRESSOF:
      return orig;

    case SCRATCH:
      for (i = 0; i < copy_insn_n_scratches; i++)
        if (copy_insn_scratch_in[i] == orig)
          return copy_insn_scratch_out[i];
      break;

    case CONST:
      /* CONST can be shared if it contains a SYMBOL_REF.  If it contains
         a LABEL_REF, it isn't sharable.  */
      if (GET_CODE (XEXP (orig, 0)) == PLUS
          && GET_CODE (XEXP (XEXP (orig, 0), 0)) == SYMBOL_REF
          && GET_CODE (XEXP (XEXP (orig, 0), 1)) == CONST_INT)
        return orig;
      break;

      /* A MEM with a constant address is not sharable.  The problem is that
         the constant address may need to be reloaded.  If the mem is shared,
         then reloading one copy of this mem will cause all copies to appear
         to have been reloaded.  */

    default:
      break;
    }

  copy = rtx_alloc (code);

  /* Copy the various flags, and other information.  We assume that
     all fields need copying, and then clear the fields that should
     not be copied.  That is the sensible default behavior, and forces
     us to explicitly document why we are *not* copying a flag.  */
  memcpy (copy, orig, sizeof (struct rtx_def) - sizeof (rtunion));

  /* We do not copy the USED flag, which is used as a mark bit during
     walks over the RTL.  */
  RTX_FLAG (copy, used) = 0;

  /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs.  */
  if (GET_RTX_CLASS (code) == 'i')
    {
      RTX_FLAG (copy, jump) = 0;
      RTX_FLAG (copy, call) = 0;
      RTX_FLAG (copy, frame_related) = 0;
    }

  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));

  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
    {
      copy->fld[i] = orig->fld[i];
      switch (*format_ptr++)
        {
        case 'e':
          if (XEXP (orig, i) != NULL)
            XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
          break;

        case 'E':
        case 'V':
          if (XVEC (orig, i) == orig_asm_constraints_vector)
            XVEC (copy, i) = copy_asm_constraints_vector;
          else if (XVEC (orig, i) == orig_asm_operands_vector)
            XVEC (copy, i) = copy_asm_operands_vector;
          else if (XVEC (orig, i) != NULL)
            {
              XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
              for (j = 0; j < XVECLEN (copy, i); j++)
                XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
            }
          break;

        case 't':
        case 'w':
        case 'i':
        case 's':
        case 'S':
        case 'u':
        case '0':
          /* These are left unchanged.  */
          break;

        default:
          abort ();
        }
    }

  if (code == SCRATCH)
    {
      i = copy_insn_n_scratches++;
      if (i >= MAX_RECOG_OPERANDS)
        abort ();
      copy_insn_scratch_in[i] = orig;
      copy_insn_scratch_out[i] = copy;
    }
  else if (code == ASM_OPERANDS)
    {
      orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
      copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
      orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
      copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
    }

  return copy;
}

/* Create a new copy of an rtx.
   This function differs from copy_rtx in that it handles SCRATCHes and
   ASM_OPERANDs properly.
   INSN doesn't really have to be a full INSN; it could be just the
   pattern.  */

rtx
copy_insn (insn)
     rtx insn;
{
  copy_insn_n_scratches = 0;
  orig_asm_operands_vector = 0;
  orig_asm_constraints_vector = 0;
  copy_asm_operands_vector = 0;
  copy_asm_constraints_vector = 0;
  return copy_insn_1 (insn);
}
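
/* Usage sketch (illustrative only): copy_insn is the entry point to
   use when duplicating a pattern that may contain SCRATCHes or
   ASM_OPERANDS, where plain copy_rtx would lose the SCRATCH pairing
   or needlessly unshare the asm operand vectors:

     rtx pat = copy_insn (PATTERN (insn));

   Resetting the statics here makes each top-level copy independent.  */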

/* Initialize data structures and variables in this file
   before generating rtl for each function.  */

void
init_emit ()
{
  struct function *f = cfun;

  f->emit = (struct emit_status *) xmalloc (sizeof (struct emit_status));
  first_insn = NULL;
  last_insn = NULL;
  seq_rtl_expr = NULL;
  cur_insn_uid = 1;
  reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
  last_linenum = 0;
  last_filename = 0;
  first_label_num = label_num;
  last_label_num = 0;
  seq_stack = NULL;

  /* Init the tables that describe all the pseudo regs.  */

  f->emit->regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;

  f->emit->regno_pointer_align
    = (unsigned char *) xcalloc (f->emit->regno_pointer_align_length,
                                 sizeof (unsigned char));

  regno_reg_rtx
    = (rtx *) xcalloc (f->emit->regno_pointer_align_length, sizeof (rtx));

  f->emit->regno_decl
    = (tree *) xcalloc (f->emit->regno_pointer_align_length, sizeof (tree));

  /* Put copies of all the virtual register rtx into regno_reg_rtx.  */
  init_virtual_regs (f->emit);

  /* Indicate that the virtual registers and stack locations are
     all pointers.  */
  REG_POINTER (stack_pointer_rtx) = 1;
  REG_POINTER (frame_pointer_rtx) = 1;
  REG_POINTER (hard_frame_pointer_rtx) = 1;
  REG_POINTER (arg_pointer_rtx) = 1;

  REG_POINTER (virtual_incoming_args_rtx) = 1;
  REG_POINTER (virtual_stack_vars_rtx) = 1;
  REG_POINTER (virtual_stack_dynamic_rtx) = 1;
  REG_POINTER (virtual_outgoing_args_rtx) = 1;
  REG_POINTER (virtual_cfa_rtx) = 1;

#ifdef STACK_BOUNDARY
  REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;

  REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
#endif

#ifdef INIT_EXPANDERS
  INIT_EXPANDERS;
#endif
}

/* Mark SS for GC.  */

static void
mark_sequence_stack (ss)
     struct sequence_stack *ss;
{
  while (ss)
    {
      ggc_mark_rtx (ss->first);
      ggc_mark_tree (ss->sequence_rtl_expr);
      ss = ss->next;
    }
}

/* Mark ES for GC.  */

void
mark_emit_status (es)
     struct emit_status *es;
{
  rtx *r;
  tree *t;
  int i;

  if (es == 0)
    return;

  for (i = es->regno_pointer_align_length, r = es->x_regno_reg_rtx,
       t = es->regno_decl;
       i > 0; --i, ++r, ++t)
    {
      ggc_mark_rtx (*r);
      ggc_mark_tree (*t);
    }

  mark_sequence_stack (es->sequence_stack);
  ggc_mark_tree (es->sequence_rtl_expr);
  ggc_mark_rtx (es->x_first_insn);
}

/* Generate the constant 0.  */

static rtx
gen_const_vector_0 (mode)
     enum machine_mode mode;
{
  rtx tem;
  rtvec v;
  int units, i;
  enum machine_mode inner;

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  v = rtvec_alloc (units);

  /* This function must not be called until CONST0_RTX for the inner
     mode has been set up.  */
  if (!CONST0_RTX (inner))
    abort ();

  for (i = 0; i < units; ++i)
    RTVEC_ELT (v, i) = CONST0_RTX (inner);

  tem = gen_rtx_CONST_VECTOR (mode, v);
  return tem;
}
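
/* Illustrative example (not in the original source): for V4SImode
   this builds

     (const_vector:V4SI [(const_int 0) (const_int 0)
                         (const_int 0) (const_int 0)])

   with every element sharing the single CONST0_RTX (SImode).  */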

/* Create some permanent unique rtl objects shared between all functions.
   LINE_NUMBERS is nonzero if line numbers are to be generated.  */

void
init_emit_once (line_numbers)
     int line_numbers;
{
  int i;
  enum machine_mode mode;
  enum machine_mode double_mode;

  /* Initialize the CONST_INT, CONST_DOUBLE, and memory attribute hash
     tables.  */
  const_int_htab = htab_create (37, const_int_htab_hash,
                                const_int_htab_eq, NULL);
  ggc_add_deletable_htab (const_int_htab, 0, 0);

  const_double_htab = htab_create (37, const_double_htab_hash,
                                   const_double_htab_eq, NULL);
  ggc_add_deletable_htab (const_double_htab, 0, 0);

  mem_attrs_htab = htab_create (37, mem_attrs_htab_hash,
                                mem_attrs_htab_eq, NULL);
  ggc_add_deletable_htab (mem_attrs_htab, 0, mem_attrs_mark);

  no_line_numbers = ! line_numbers;

  /* Compute the word and byte modes.  */

  byte_mode = VOIDmode;
  word_mode = VOIDmode;
  double_mode = VOIDmode;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
          && byte_mode == VOIDmode)
        byte_mode = mode;

      if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
          && word_mode == VOIDmode)
        word_mode = mode;
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
          && double_mode == VOIDmode)
        double_mode = mode;
    }

  ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);

  /* Assign register numbers to the globally defined register rtx.
     This must be done at runtime because the register number field
     is in a union and some compilers can't initialize unions.  */

  pc_rtx = gen_rtx (PC, VOIDmode);
  cc0_rtx = gen_rtx (CC0, VOIDmode);
  stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
  frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
  if (hard_frame_pointer_rtx == 0)
    hard_frame_pointer_rtx = gen_raw_REG (Pmode,
                                          HARD_FRAME_POINTER_REGNUM);
  if (arg_pointer_rtx == 0)
    arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
  virtual_incoming_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
  virtual_stack_vars_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
  virtual_stack_dynamic_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
  virtual_outgoing_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
  virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);

  /* These rtx must be roots if GC is enabled.  */
  ggc_add_rtx_root (global_rtl, GR_MAX);

#ifdef INIT_EXPANDERS
  /* This is to initialize {init|mark|free}_machine_status before the first
     call to push_function_context_to.  This is needed by the Chill front
     end which calls push_function_context_to before the first call to
     init_function_start.  */
  INIT_EXPANDERS;
#endif

  /* Create the unique rtx's for certain rtx codes and operand values.  */

  /* Don't use gen_rtx here since gen_rtx in this case
     tries to use these variables.  */
  for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
    const_int_rtx[i + MAX_SAVED_CONST_INT] =
      gen_rtx_raw_CONST_INT (VOIDmode, i);
  ggc_add_rtx_root (const_int_rtx, 2 * MAX_SAVED_CONST_INT + 1);

  if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
      && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
    const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
  else
    const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);

  REAL_VALUE_FROM_INT (dconst0, 0, 0, double_mode);
  REAL_VALUE_FROM_INT (dconst1, 1, 0, double_mode);
  REAL_VALUE_FROM_INT (dconst2, 2, 0, double_mode);
  REAL_VALUE_FROM_INT (dconstm1, -1, -1, double_mode);

  for (i = 0; i <= 2; i++)
    {
      REAL_VALUE_TYPE *r =
        (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        const_tiny_rtx[i][(int) mode] =
          CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);

      const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        const_tiny_rtx[i][(int) mode] = GEN_INT (i);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT);
           mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        const_tiny_rtx[i][(int) mode] = GEN_INT (i);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    const_tiny_rtx[0][(int) mode] = gen_const_vector_0 (mode);

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    const_tiny_rtx[0][(int) mode] = gen_const_vector_0 (mode);

  for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
    if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
      const_tiny_rtx[0][i] = const0_rtx;

  const_tiny_rtx[0][(int) BImode] = const0_rtx;
  if (STORE_FLAG_VALUE == 1)
    const_tiny_rtx[1][(int) BImode] = const1_rtx;

  /* For bounded pointers, `&const_tiny_rtx[0][0]' is not the same as
     `(rtx *) const_tiny_rtx'.  The former has bounds that only cover
     `const_tiny_rtx[0]', whereas the latter has bounds that cover all.  */
  ggc_add_rtx_root ((rtx *) const_tiny_rtx,
                    sizeof const_tiny_rtx / sizeof (rtx));
  ggc_add_rtx_root (&const_true_rtx, 1);

#ifdef RETURN_ADDRESS_POINTER_REGNUM
  return_address_pointer_rtx
    = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
#endif

#ifdef STRUCT_VALUE
  struct_value_rtx = STRUCT_VALUE;
#else
  struct_value_rtx = gen_rtx_REG (Pmode, STRUCT_VALUE_REGNUM);
#endif

#ifdef STRUCT_VALUE_INCOMING
  struct_value_incoming_rtx = STRUCT_VALUE_INCOMING;
#else
#ifdef STRUCT_VALUE_INCOMING_REGNUM
  struct_value_incoming_rtx
    = gen_rtx_REG (Pmode, STRUCT_VALUE_INCOMING_REGNUM);
#else
  struct_value_incoming_rtx = struct_value_rtx;
#endif
#endif

#ifdef STATIC_CHAIN_REGNUM
  static_chain_rtx = gen_rtx_REG (Pmode, STATIC_CHAIN_REGNUM);

#ifdef STATIC_CHAIN_INCOMING_REGNUM
  if (STATIC_CHAIN_INCOMING_REGNUM != STATIC_CHAIN_REGNUM)
    static_chain_incoming_rtx
      = gen_rtx_REG (Pmode, STATIC_CHAIN_INCOMING_REGNUM);
  else
#endif
    static_chain_incoming_rtx = static_chain_rtx;
#endif

#ifdef STATIC_CHAIN
  static_chain_rtx = STATIC_CHAIN;

#ifdef STATIC_CHAIN_INCOMING
  static_chain_incoming_rtx = STATIC_CHAIN_INCOMING;
#else
  static_chain_incoming_rtx = static_chain_rtx;
#endif
#endif

  if (PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
    pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);

  ggc_add_rtx_root (&pic_offset_table_rtx, 1);
  ggc_add_rtx_root (&struct_value_rtx, 1);
  ggc_add_rtx_root (&struct_value_incoming_rtx, 1);
  ggc_add_rtx_root (&static_chain_rtx, 1);
  ggc_add_rtx_root (&static_chain_incoming_rtx, 1);
  ggc_add_rtx_root (&return_address_pointer_rtx, 1);
}

/* Query and clear/restore no_line_numbers.  This is used by the
   switch / case handling in stmt.c to give proper line numbers in
   warnings about unreachable code.  */

int
force_line_numbers ()
{
  int old = no_line_numbers;

  no_line_numbers = 0;
  if (old)
    force_next_line_note ();
  return old;
}

void
restore_line_number_status (old_value)
     int old_value;
{
  no_line_numbers = old_value;
}

/* Produce an exact duplicate of insn INSN after AFTER.
   Take care to update any libcall regions if present.  */

rtx
emit_copy_of_insn_after (insn, after)
     rtx insn, after;
{
  rtx new;
  rtx note1, note2, link;

  switch (GET_CODE (insn))
    {
    case INSN:
      new = emit_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case JUMP_INSN:
      new = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case CALL_INSN:
      new = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
      if (CALL_INSN_FUNCTION_USAGE (insn))
        CALL_INSN_FUNCTION_USAGE (new)
          = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
      SIBLING_CALL_P (new) = SIBLING_CALL_P (insn);
      CONST_OR_PURE_CALL_P (new) = CONST_OR_PURE_CALL_P (insn);
      break;

    default:
      abort ();
    }

  /* Update LABEL_NUSES.  */
  mark_jump_label (PATTERN (new), new, 0);

  /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
     make them.  */
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) != REG_LABEL)
      {
        if (GET_CODE (link) == EXPR_LIST)
          REG_NOTES (new)
            = copy_insn_1 (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link),
                                              XEXP (link, 0),
                                              REG_NOTES (new)));
        else
          REG_NOTES (new)
            = copy_insn_1 (gen_rtx_INSN_LIST (REG_NOTE_KIND (link),
                                              XEXP (link, 0),
                                              REG_NOTES (new)));
      }

  /* Fix the libcall sequences.  */
  if ((note1 = find_reg_note (new, REG_RETVAL, NULL_RTX)) != NULL)
    {
      rtx p = new;
      while ((note2 = find_reg_note (p, REG_LIBCALL, NULL_RTX)) == NULL)
        p = PREV_INSN (p);
      XEXP (note1, 0) = p;
      XEXP (note2, 0) = new;
    }

  return new;
}
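
/* Usage sketch (illustrative only; `insn' and `after' are assumed):
   a pass duplicating code, e.g. when splitting an edge, clones an
   insn with

     rtx copy = emit_copy_of_insn_after (insn, after);

   The REG_RETVAL/REG_LIBCALL fixup above re-points the copied notes
   at the copied insns, keeping the cloned libcall region
   self-contained instead of referring back into the original.  */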