/* Emit RTL for the GNU C-Compiler expander.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */


/* Middle-to-low level generation of rtx code and insns.

   This file contains the functions `gen_rtx', `gen_reg_rtx'
   and `gen_label_rtx' that are the usual ways of creating rtl
   expressions for most purposes.

   It also has the functions for creating insns and linking
   them in the doubly-linked chain.

   The patterns of the insns are created by machine-dependent
   routines in insn-emit.c, which is generated automatically from
   the machine description.  These routines use `gen_rtx' to make
   the individual rtx's of the pattern; what is machine dependent
   is the kind of rtx's they make and what arguments they use.  */

#include "config.h"
#include "system.h"
#include "toplev.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "hashtab.h"
#include "insn-config.h"
#include "recog.h"
#include "real.h"
#include "obstack.h"
#include "bitmap.h"
#include "basic-block.h"
#include "ggc.h"
#include "debug.h"
#include "langhooks.h"
/* Commonly used modes.  */

enum machine_mode byte_mode;	/* Mode whose width is BITS_PER_UNIT.  */
enum machine_mode word_mode;	/* Mode whose width is BITS_PER_WORD.  */
enum machine_mode double_mode;	/* Mode whose width is DOUBLE_TYPE_SIZE.  */
enum machine_mode ptr_mode;	/* Mode whose width is POINTER_SIZE.  */


/* This is *not* reset after each function.  It gives each CODE_LABEL
   in the entire compilation a unique label number.  */

static int label_num = 1;

/* Highest label number in current function.
   Zero means use the value of label_num instead.
   This is nonzero only when belatedly compiling an inline function.  */

static int last_label_num;

/* Value label_num had when set_new_first_and_last_label_number was called.
   If label_num has not changed since then, last_label_num is valid.  */

static int base_label_num;

/* Nonzero means do not generate NOTEs for source line numbers.  */

static int no_line_numbers;
/* Commonly used rtx's, so that we only need space for one copy.
   These are initialized once for the entire compilation.
   All of these except perhaps the floating-point CONST_DOUBLEs
   are unique; no other rtx-object will be equal to any of these.  */

rtx global_rtl[GR_MAX];

/* We record floating-point CONST_DOUBLEs in each floating-point mode for
   the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
   record a copy of const[012]_rtx.  */

rtx const_tiny_rtx[3][(int) MAX_MACHINE_MODE];

rtx const_true_rtx;

REAL_VALUE_TYPE dconst0;
REAL_VALUE_TYPE dconst1;
REAL_VALUE_TYPE dconst2;
REAL_VALUE_TYPE dconstm1;
/* All references to the following fixed hard registers go through
   these unique rtl objects.  On machines where the frame-pointer and
   arg-pointer are the same register, they use the same unique object.

   After register allocation, other rtl objects which used to be pseudo-regs
   may be clobbered to refer to the frame-pointer register.
   But references that were originally to the frame-pointer can be
   distinguished from the others because they contain frame_pointer_rtx.

   When to use frame_pointer_rtx and hard_frame_pointer_rtx is a little
   tricky: until register elimination has taken place hard_frame_pointer_rtx
   should be used if it is being set, and frame_pointer_rtx otherwise.  After
   register elimination hard_frame_pointer_rtx should always be used.
   On machines where the two registers are the same (most machines), these
   are the same rtx.

   In an inline procedure, the stack and frame pointer rtxs may not be
   used for anything else.  */
rtx struct_value_rtx;		/* (REG:Pmode STRUCT_VALUE_REGNUM) */
rtx struct_value_incoming_rtx;	/* (REG:Pmode STRUCT_VALUE_INCOMING_REGNUM) */
rtx static_chain_rtx;		/* (REG:Pmode STATIC_CHAIN_REGNUM) */
rtx static_chain_incoming_rtx;	/* (REG:Pmode STATIC_CHAIN_INCOMING_REGNUM) */
rtx pic_offset_table_rtx;	/* (REG:Pmode PIC_OFFSET_TABLE_REGNUM) */

/* This is used to implement __builtin_return_address for some machines.
   See for instance the MIPS port.  */
rtx return_address_pointer_rtx;	/* (REG:Pmode RETURN_ADDRESS_POINTER_REGNUM) */
/* We make one copy of (const_int C) where C is in
   [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
   to save space during the compilation and simplify comparisons of
   integers.  */

rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];

/* A hash table storing CONST_INTs whose absolute value is greater
   than MAX_SAVED_CONST_INT.  */

static htab_t const_int_htab;

/* A hash table storing memory attribute structures.  */
static htab_t mem_attrs_htab;
/* start_sequence and gen_sequence can make a lot of rtx expressions which are
   shortly thrown away.  We use two mechanisms to prevent this waste:

   For sizes up to 5 elements, we keep a SEQUENCE and its associated
   rtvec for use by gen_sequence.  One entry for each size is
   sufficient because most cases are calls to gen_sequence followed by
   immediately emitting the SEQUENCE.  Reuse is safe since emitting a
   sequence is destructive on the insn in it anyway and hence can't be
   redone.

   We do not bother to save this cached data over nested function calls.
   Instead, we just reinitialize them.  */

#define SEQUENCE_RESULT_SIZE 5

static rtx sequence_result[SEQUENCE_RESULT_SIZE];

/* During RTL generation, we also keep a list of free INSN rtl codes.  */
static rtx free_insn;

#define first_insn (cfun->emit->x_first_insn)
#define last_insn (cfun->emit->x_last_insn)
#define cur_insn_uid (cfun->emit->x_cur_insn_uid)
#define last_linenum (cfun->emit->x_last_linenum)
#define last_filename (cfun->emit->x_last_filename)
#define first_label_num (cfun->emit->x_first_label_num)
static rtx make_jump_insn_raw		PARAMS ((rtx));
static rtx make_call_insn_raw		PARAMS ((rtx));
static rtx find_line_note		PARAMS ((rtx));
static void mark_sequence_stack		PARAMS ((struct sequence_stack *));
static rtx change_address_1		PARAMS ((rtx, enum machine_mode, rtx,
						 int));
static void unshare_all_rtl_1		PARAMS ((rtx));
static void unshare_all_decls		PARAMS ((tree));
static void reset_used_decls		PARAMS ((tree));
static void mark_label_nuses		PARAMS ((rtx));
static hashval_t const_int_htab_hash	PARAMS ((const void *));
static int const_int_htab_eq		PARAMS ((const void *,
						 const void *));
static hashval_t mem_attrs_htab_hash	PARAMS ((const void *));
static int mem_attrs_htab_eq		PARAMS ((const void *,
						 const void *));
static void mem_attrs_mark		PARAMS ((const void *));
static mem_attrs *get_mem_attrs		PARAMS ((HOST_WIDE_INT, tree, rtx,
						 rtx, unsigned int,
						 enum machine_mode));
static tree component_ref_for_mem_expr	PARAMS ((tree));
static rtx gen_const_vector_0		PARAMS ((enum machine_mode));
/* Probability of the conditional branch currently processed by try_split.
   Set to -1 otherwise.  */
int split_branch_probability = -1;

/* Returns a hash code for X (which is really a CONST_INT).  */

static hashval_t
const_int_htab_hash (x)
     const void *x;
{
  return (hashval_t) INTVAL ((const struct rtx_def *) x);
}
/* Returns non-zero if the value represented by X (which is really a
   CONST_INT) is the same as that given by Y (which is really a
   HOST_WIDE_INT *).  */

static int
const_int_htab_eq (x, y)
     const void *x;
     const void *y;
{
  return (INTVAL ((const struct rtx_def *) x) == *((const HOST_WIDE_INT *) y));
}
/* Returns a hash code for X (which is really a mem_attrs *).  */

static hashval_t
mem_attrs_htab_hash (x)
     const void *x;
{
  mem_attrs *p = (mem_attrs *) x;

  return (p->alias ^ (p->align * 1000)
	  ^ ((p->offset ? INTVAL (p->offset) : 0) * 50000)
	  ^ ((p->size ? INTVAL (p->size) : 0) * 2500000)
	  ^ (size_t) p->expr);
}
/* Returns non-zero if the value represented by X (which is really a
   mem_attrs *) is the same as that given by Y (which is also really a
   mem_attrs *).  */

static int
mem_attrs_htab_eq (x, y)
     const void *x;
     const void *y;
{
  mem_attrs *p = (mem_attrs *) x;
  mem_attrs *q = (mem_attrs *) y;

  return (p->alias == q->alias && p->expr == q->expr && p->offset == q->offset
	  && p->size == q->size && p->align == q->align);
}
/* This routine is called when we determine that we need a mem_attrs entry.
   It marks the associated decl and RTL as being used, if present.  */

static void
mem_attrs_mark (x)
     const void *x;
{
  mem_attrs *p = (mem_attrs *) x;

  if (p->expr)
    ggc_mark_tree (p->expr);

  if (p->offset)
    ggc_mark_rtx (p->offset);

  if (p->size)
    ggc_mark_rtx (p->size);
}
/* Allocate a new mem_attrs structure and insert it into the hash table if
   one identical to it is not already in the table.  We are doing this for
   MEM of mode MODE.  */

static mem_attrs *
get_mem_attrs (alias, expr, offset, size, align, mode)
     HOST_WIDE_INT alias;
     tree expr;
     rtx offset;
     rtx size;
     unsigned int align;
     enum machine_mode mode;
{
  mem_attrs attrs;
  void **slot;

  /* If everything is the default, we can just return zero.  */
  if (alias == 0 && expr == 0 && offset == 0
      && (size == 0
	  || (mode != BLKmode && GET_MODE_SIZE (mode) == INTVAL (size)))
      && (align == BITS_PER_UNIT
	  || (STRICT_ALIGNMENT
	      && mode != BLKmode && align == GET_MODE_ALIGNMENT (mode))))
    return 0;

  attrs.alias = alias;
  attrs.expr = expr;
  attrs.offset = offset;
  attrs.size = size;
  attrs.align = align;

  slot = htab_find_slot (mem_attrs_htab, &attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc (sizeof (mem_attrs));
      memcpy (*slot, &attrs, sizeof (mem_attrs));
    }

  return *slot;
}
/* Generate a new REG rtx.  Make sure ORIGINAL_REGNO is set properly, and
   don't attempt to share with the various global pieces of rtl (such as
   frame_pointer_rtx).  */

rtx
gen_raw_REG (mode, regno)
     enum machine_mode mode;
     int regno;
{
  rtx x = gen_rtx_raw_REG (mode, regno);
  ORIGINAL_REGNO (x) = regno;
  return x;
}
/* There are some RTL codes that require special attention; the generation
   functions do the raw handling.  If you add to this list, modify
   special_rtx in gengenrtl.c as well.  */

rtx
gen_rtx_CONST_INT (mode, arg)
     enum machine_mode mode ATTRIBUTE_UNUSED;
     HOST_WIDE_INT arg;
{
  void **slot;

  if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
    return const_int_rtx[arg + MAX_SAVED_CONST_INT];

#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
  if (const_true_rtx && arg == STORE_FLAG_VALUE)
    return const_true_rtx;
#endif

  /* Look up the CONST_INT in the hash table.  */
  slot = htab_find_slot_with_hash (const_int_htab, &arg,
				   (hashval_t) arg, INSERT);
  if (*slot == 0)
    *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);

  return (rtx) *slot;
}
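
/* Illustrative sketch (not part of the original file): because every
   CONST_INT value is represented by a single shared rtx -- small values
   via const_int_rtx[], larger ones via const_int_htab -- pointer
   comparison suffices to test for a particular constant:

       rtx a = GEN_INT (0);
       rtx b = gen_rtx_CONST_INT (VOIDmode, 0);
       -- a == b, and both are const0_rtx; no rtx_equal_p call needed.  */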
rtx
gen_int_mode (c, mode)
     HOST_WIDE_INT c;
     enum machine_mode mode;
{
  return GEN_INT (trunc_int_for_mode (c, mode));
}
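
/* Example (added for illustration): gen_int_mode yields the CONST_INT
   whose value is C reinterpreted in MODE and sign-extended back to a
   full HOST_WIDE_INT:

       rtx x = gen_int_mode (0x1ff, QImode);
       -- 0x1ff truncated to 8 bits is 0xff, which sign-extends to -1,
       -- so x is (const_int -1), i.e. constm1_rtx.  */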
/* CONST_DOUBLEs need special handling because their length is known
   only at run-time.  */

rtx
gen_rtx_CONST_DOUBLE (mode, arg0, arg1)
     enum machine_mode mode;
     HOST_WIDE_INT arg0, arg1;
{
  rtx r = rtx_alloc (CONST_DOUBLE);
  int i;

  PUT_MODE (r, mode);
  X0EXP (r, 0) = NULL_RTX;
  XWINT (r, 1) = arg0;
  XWINT (r, 2) = arg1;

  for (i = GET_RTX_LENGTH (CONST_DOUBLE) - 1; i > 2; --i)
    XWINT (r, i) = 0;

  return r;
}
rtx
gen_rtx_REG (mode, regno)
     enum machine_mode mode;
     int regno;
{
  /* In case the MD file explicitly references the frame pointer, have
     all such references point to the same frame pointer.  This is
     used during frame pointer elimination to distinguish the explicit
     references to these registers from pseudos that happened to be
     assigned to them.

     If we have eliminated the frame pointer or arg pointer, we will
     be using it as a normal register, for example as a spill
     register.  In such cases, we might be accessing it in a mode that
     is not Pmode and therefore cannot use the pre-allocated rtx.

     Also don't do this when we are making new REGs in reload, since
     we don't want to get confused with the real pointers.  */

  if (mode == Pmode && !reload_in_progress)
    {
      if (regno == FRAME_POINTER_REGNUM)
	return frame_pointer_rtx;
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
      if (regno == HARD_FRAME_POINTER_REGNUM)
	return hard_frame_pointer_rtx;
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && HARD_FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
      if (regno == ARG_POINTER_REGNUM)
	return arg_pointer_rtx;
#endif
#ifdef RETURN_ADDRESS_POINTER_REGNUM
      if (regno == RETURN_ADDRESS_POINTER_REGNUM)
	return return_address_pointer_rtx;
#endif
      if (regno == PIC_OFFSET_TABLE_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	return pic_offset_table_rtx;
      if (regno == STACK_POINTER_REGNUM)
	return stack_pointer_rtx;
    }

  return gen_raw_REG (mode, regno);
}
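
/* Usage sketch (illustrative only): the sharing above means that for
   Pmode requests outside of reload, the unique global objects come back:

       rtx fp1 = gen_rtx_REG (Pmode, FRAME_POINTER_REGNUM);
       rtx fp2 = gen_rtx_REG (Pmode, FRAME_POINTER_REGNUM);
       -- fp1 == fp2 == frame_pointer_rtx, so pointer equality
       -- identifies explicit frame-pointer references during
       -- register elimination.  */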
rtx
gen_rtx_MEM (mode, addr)
     enum machine_mode mode;
     rtx addr;
{
  rtx rt = gen_rtx_raw_MEM (mode, addr);

  /* This field is not cleared by the mere allocation of the rtx, so
     we clear it here.  */
  MEM_ATTRS (rt) = 0;

  return rt;
}
rtx
gen_rtx_SUBREG (mode, reg, offset)
     enum machine_mode mode;
     rtx reg;
     int offset;
{
  /* This is the most common failure type.
     Catch it early so we can see who does it.  */
  if ((offset % GET_MODE_SIZE (mode)) != 0)
    abort ();

  /* This check isn't usable right now because combine will
     throw arbitrary crap like a CALL into a SUBREG in
     gen_lowpart_for_combine so we must just eat it.  */
#if 0
  /* Check for this too.  */
  if (offset >= GET_MODE_SIZE (GET_MODE (reg)))
    abort ();
#endif
  return gen_rtx_fmt_ei (SUBREG, mode, reg, offset);
}
/* Generate a SUBREG representing the least-significant part of REG if MODE
   is smaller than the mode of REG; otherwise a paradoxical SUBREG.  */

rtx
gen_lowpart_SUBREG (mode, reg)
     enum machine_mode mode;
     rtx reg;
{
  enum machine_mode inmode;

  inmode = GET_MODE (reg);
  if (inmode == VOIDmode)
    inmode = mode;
  return gen_rtx_SUBREG (mode, reg,
			 subreg_lowpart_offset (mode, inmode));
}
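
/* Worked example (not from the original source): on a 32-bit big-endian
   target the low SImode part of a DImode value lives at byte offset 4,
   so for a DImode register DREG

       gen_lowpart_SUBREG (SImode, dreg)

   yields (subreg:SI (reg:DI ...) 4); on a little-endian target the
   byte offset would be 0.  See subreg_lowpart_offset below.  */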
/* rtx gen_rtx (code, mode, [element1, ..., elementn])
**
**	    This routine generates an RTX of the size specified by
**	<code>, which is an RTX code.  The RTX structure is initialized
**	from the arguments <element1> through <elementn>, which are
**	interpreted according to the specific RTX type's format.  The
**	special machine mode associated with the rtx (if any) is specified
**	in <mode>.
**
**	    gen_rtx can be invoked in a way which resembles the lisp-like
**	rtx it will generate.  For example, the following rtx structure:
**
**	      (plus:QI (mem:QI (reg:SI 1))
**		       (mem:QI (plus:SI (reg:SI 2) (reg:SI 3))))
**
**		...would be generated by the following C code:
**
**		gen_rtx (PLUS, QImode,
**		    gen_rtx (MEM, QImode,
**			gen_rtx (REG, SImode, 1)),
**		    gen_rtx (MEM, QImode,
**			gen_rtx (PLUS, SImode,
**			    gen_rtx (REG, SImode, 2),
**			    gen_rtx (REG, SImode, 3)))),
*/
/*VARARGS2*/
rtx
gen_rtx VPARAMS ((enum rtx_code code, enum machine_mode mode, ...))
{
  int i;		/* Array indices...			*/
  const char *fmt;	/* Current rtx's format...		*/
  rtx rt_val;		/* RTX to return to caller...		*/

  VA_OPEN (p, mode);
  VA_FIXEDARG (p, enum rtx_code, code);
  VA_FIXEDARG (p, enum machine_mode, mode);

  switch (code)
    {
    case CONST_INT:
      rt_val = gen_rtx_CONST_INT (mode, va_arg (p, HOST_WIDE_INT));
      break;

    case CONST_DOUBLE:
      {
	HOST_WIDE_INT arg0 = va_arg (p, HOST_WIDE_INT);
	HOST_WIDE_INT arg1 = va_arg (p, HOST_WIDE_INT);

	rt_val = gen_rtx_CONST_DOUBLE (mode, arg0, arg1);
      }
      break;

    case REG:
      rt_val = gen_rtx_REG (mode, va_arg (p, int));
      break;

    case MEM:
      rt_val = gen_rtx_MEM (mode, va_arg (p, rtx));
      break;

    default:
      rt_val = rtx_alloc (code);	/* Allocate the storage space.  */
      rt_val->mode = mode;		/* Store the machine mode...  */

      fmt = GET_RTX_FORMAT (code);	/* Find the right format...  */
      for (i = 0; i < GET_RTX_LENGTH (code); i++)
	{
	  switch (*fmt++)
	    {
	    case '0':		/* Unused field.  */
	      break;

	    case 'i':		/* An integer?  */
	      XINT (rt_val, i) = va_arg (p, int);
	      break;

	    case 'w':		/* A wide integer?  */
	      XWINT (rt_val, i) = va_arg (p, HOST_WIDE_INT);
	      break;

	    case 's':		/* A string?  */
	      XSTR (rt_val, i) = va_arg (p, char *);
	      break;

	    case 'e':		/* An expression?  */
	    case 'u':		/* An insn?  Same except when printing.  */
	      XEXP (rt_val, i) = va_arg (p, rtx);
	      break;

	    case 'E':		/* An RTX vector?  */
	      XVEC (rt_val, i) = va_arg (p, rtvec);
	      break;

	    case 'b':		/* A bitmap?  */
	      XBITMAP (rt_val, i) = va_arg (p, bitmap);
	      break;

	    case 't':		/* A tree?  */
	      XTREE (rt_val, i) = va_arg (p, tree);
	      break;

	    default:
	      abort ();
	    }
	}
      break;
    }

  VA_CLOSE (p);
  return rt_val;
}
/* gen_rtvec (n, [rt1, ..., rtn])
**
**	    This routine creates an rtvec and stores within it the
**	pointers to rtx's which are its arguments.
*/

/*VARARGS1*/
rtvec
gen_rtvec VPARAMS ((int n, ...))
{
  int i, save_n;
  rtx *vector;

  VA_OPEN (p, n);
  VA_FIXEDARG (p, int, n);

  if (n == 0)
    return NULL_RTVEC;		/* Don't allocate an empty rtvec...	*/

  vector = (rtx *) alloca (n * sizeof (rtx));

  for (i = 0; i < n; i++)
    vector[i] = va_arg (p, rtx);

  /* The definition of VA_* in K&R C causes `n' to go out of scope.  */
  save_n = n;
  VA_CLOSE (p);

  return gen_rtvec_v (save_n, vector);
}
rtvec
gen_rtvec_v (n, argp)
     int n;
     rtx *argp;
{
  int i;
  rtvec rt_val;

  if (n == 0)
    return NULL_RTVEC;		/* Don't allocate an empty rtvec...	*/

  rt_val = rtvec_alloc (n);	/* Allocate an rtvec...			*/

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}
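
/* Sketch (illustrative): gen_rtvec is how multi-part patterns such as
   PARALLELs are built.  Assuming SET1 and SET2 are previously built
   SET rtxs:

       rtx par = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set1, set2));

   gen_rtvec copies its varargs into a stack vector and defers to
   gen_rtvec_v, which does the actual allocation.  */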
/* Generate a REG rtx for a new pseudo register of mode MODE.
   This pseudo is assigned the next sequential register number.  */

rtx
gen_reg_rtx (mode)
     enum machine_mode mode;
{
  struct function *f = cfun;
  rtx val;

  /* Don't let anything called after initial flow analysis create new
     registers.  */
  if (no_new_pseudos)
    abort ();

  if (generating_concat_p
      && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
    {
      /* For complex modes, don't make a single pseudo.
	 Instead, make a CONCAT of two pseudos.
	 This allows noncontiguous allocation of the real and imaginary parts,
	 which makes much better code.  Besides, allocating DCmode
	 pseudos overstrains reload on some machines like the 386.  */
      rtx realpart, imagpart;
      int size = GET_MODE_UNIT_SIZE (mode);
      enum machine_mode partmode
	= mode_for_size (size * BITS_PER_UNIT,
			 (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
			  ? MODE_FLOAT : MODE_INT),
			 0);

      realpart = gen_reg_rtx (partmode);
      imagpart = gen_reg_rtx (partmode);
      return gen_rtx_CONCAT (mode, realpart, imagpart);
    }

  /* Make sure regno_pointer_align, regno_decl, and regno_reg_rtx are large
     enough to have an element for this pseudo reg number.  */

  if (reg_rtx_no == f->emit->regno_pointer_align_length)
    {
      int old_size = f->emit->regno_pointer_align_length;
      char *new;
      rtx *new1;
      tree *new2;

      new = xrealloc (f->emit->regno_pointer_align, old_size * 2);
      memset (new + old_size, 0, old_size);
      f->emit->regno_pointer_align = (unsigned char *) new;

      new1 = (rtx *) xrealloc (f->emit->x_regno_reg_rtx,
			       old_size * 2 * sizeof (rtx));
      memset (new1 + old_size, 0, old_size * sizeof (rtx));
      regno_reg_rtx = new1;

      new2 = (tree *) xrealloc (f->emit->regno_decl,
				old_size * 2 * sizeof (tree));
      memset (new2 + old_size, 0, old_size * sizeof (tree));
      f->emit->regno_decl = new2;

      f->emit->regno_pointer_align_length = old_size * 2;
    }

  val = gen_raw_REG (mode, reg_rtx_no);
  regno_reg_rtx[reg_rtx_no++] = val;
  return val;
}
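
/* Example (added for illustration; the register numbers are arbitrary):
   with generating_concat_p set, a request for a complex pseudo is split
   into two part-mode pseudos:

       rtx c = gen_reg_rtx (DCmode);
       -- c is (concat:DC (reg:DF 100) (reg:DF 101)) rather than a
       -- single (reg:DC 100), so the real and imaginary parts can be
       -- allocated independently.  */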
/* Identify REG (which may be a CONCAT) as a user register.  */

void
mark_user_reg (reg)
     rtx reg;
{
  if (GET_CODE (reg) == CONCAT)
    {
      REG_USERVAR_P (XEXP (reg, 0)) = 1;
      REG_USERVAR_P (XEXP (reg, 1)) = 1;
    }
  else if (GET_CODE (reg) == REG)
    REG_USERVAR_P (reg) = 1;
  else
    abort ();
}
/* Identify REG as a probable pointer register and show its alignment
   as ALIGN, if nonzero.  */

void
mark_reg_pointer (reg, align)
     rtx reg;
     int align;
{
  if (! REG_POINTER (reg))
    {
      REG_POINTER (reg) = 1;

      if (align)
	REGNO_POINTER_ALIGN (REGNO (reg)) = align;
    }
  else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
    /* We can no longer be sure just how aligned this pointer is.  */
    REGNO_POINTER_ALIGN (REGNO (reg)) = align;
}
/* Return 1 plus largest pseudo reg number used in the current function.  */

int
max_reg_num ()
{
  return reg_rtx_no;
}

/* Return 1 + the largest label number used so far in the current function.  */

int
max_label_num ()
{
  if (last_label_num && label_num == base_label_num)
    return last_label_num;
  return label_num;
}

/* Return first label number used in this function (if any were used).  */

int
get_first_label_num ()
{
  return first_label_num;
}
/* Return the final regno of X, which is a SUBREG of a hard
   register.  */
unsigned int
subreg_hard_regno (x, check_mode)
     rtx x;
     int check_mode;
{
  enum machine_mode mode = GET_MODE (x);
  unsigned int byte_offset, base_regno, final_regno;
  rtx reg = SUBREG_REG (x);

  /* This is where we attempt to catch illegal subregs
     created by the compiler.  */
  if (GET_CODE (x) != SUBREG
      || GET_CODE (reg) != REG)
    abort ();
  base_regno = REGNO (reg);
  if (base_regno >= FIRST_PSEUDO_REGISTER)
    abort ();
  if (check_mode && ! HARD_REGNO_MODE_OK (base_regno, GET_MODE (reg)))
    abort ();

  /* Catch non-congruent offsets too.  */
  byte_offset = SUBREG_BYTE (x);
  if ((byte_offset % GET_MODE_SIZE (mode)) != 0)
    abort ();

  final_regno = subreg_regno (x);

  return final_regno;
}
/* Return a value representing some low-order bits of X, where the number
   of low-order bits is given by MODE.  Note that no conversion is done
   between floating-point and fixed-point values, rather, the bit
   representation is returned.

   This function handles the cases in common between gen_lowpart, below,
   and two variants in cse.c and combine.c.  These are the cases that can
   be safely handled at all points in the compilation.

   If this is not a case we can handle, return 0.  */

rtx
gen_lowpart_common (mode, x)
     enum machine_mode mode;
     rtx x;
{
  int msize = GET_MODE_SIZE (mode);
  int xsize = GET_MODE_SIZE (GET_MODE (x));
  int offset = 0;

  if (GET_MODE (x) == mode)
    return x;

  /* MODE must occupy no more words than the mode of X.  */
  if (GET_MODE (x) != VOIDmode
      && ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
	  > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
    return 0;

  offset = subreg_lowpart_offset (mode, GET_MODE (x));

  if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
      && (GET_MODE_CLASS (mode) == MODE_INT
	  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
    {
      /* If we are getting the low-order part of something that has been
	 sign- or zero-extended, we can either just use the object being
	 extended or make a narrower extension.  If we want an even smaller
	 piece than the size of the object being extended, call ourselves
	 recursively.

	 This case is used mostly by combine and cse.  */

      if (GET_MODE (XEXP (x, 0)) == mode)
	return XEXP (x, 0);
      else if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
	return gen_lowpart_common (mode, XEXP (x, 0));
      else if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (x)))
	return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
    }
  else if (GET_CODE (x) == SUBREG || GET_CODE (x) == REG
	   || GET_CODE (x) == CONCAT)
    return simplify_gen_subreg (mode, x, GET_MODE (x), offset);
  /* If X is a CONST_INT or a CONST_DOUBLE, extract the appropriate bits
     from the low-order part of the constant.  */
  else if ((GET_MODE_CLASS (mode) == MODE_INT
	    || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
	   && GET_MODE (x) == VOIDmode
	   && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
    {
      /* If MODE is twice the host word size, X is already the desired
	 representation.  Otherwise, if MODE is wider than a word, we can't
	 do this.  If MODE is exactly a word, return just one CONST_INT.  */

      if (GET_MODE_BITSIZE (mode) >= 2 * HOST_BITS_PER_WIDE_INT)
	return x;
      else if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
	return 0;
      else if (GET_MODE_BITSIZE (mode) == HOST_BITS_PER_WIDE_INT)
	return (GET_CODE (x) == CONST_INT ? x
		: GEN_INT (CONST_DOUBLE_LOW (x)));
      else
	{
	  /* MODE must be narrower than HOST_BITS_PER_WIDE_INT.  */
	  HOST_WIDE_INT val = (GET_CODE (x) == CONST_INT ? INTVAL (x)
			       : CONST_DOUBLE_LOW (x));

	  /* Sign extend to HOST_WIDE_INT.  */
	  val = trunc_int_for_mode (val, mode);

	  return (GET_CODE (x) == CONST_INT && INTVAL (x) == val ? x
		  : GEN_INT (val));
	}
    }

  /* The floating-point emulator can handle all conversions between
     FP and integer operands.  This simplifies reload because it
     doesn't have to deal with constructs like (subreg:DI
     (const_double:SF ...)) or (subreg:DF (const_int ...)).  */
  /* Single-precision floats are always 32-bits and double-precision
     floats are always 64-bits.  */

  else if (GET_MODE_CLASS (mode) == MODE_FLOAT
	   && GET_MODE_BITSIZE (mode) == 32
	   && GET_CODE (x) == CONST_INT)
    {
      REAL_VALUE_TYPE r;
      HOST_WIDE_INT i;

      i = INTVAL (x);
      r = REAL_VALUE_FROM_TARGET_SINGLE (i);
      return CONST_DOUBLE_FROM_REAL_VALUE (r, mode);
    }
  else if (GET_MODE_CLASS (mode) == MODE_FLOAT
	   && GET_MODE_BITSIZE (mode) == 64
	   && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE)
	   && GET_MODE (x) == VOIDmode)
    {
      REAL_VALUE_TYPE r;
      HOST_WIDE_INT i[2];
      HOST_WIDE_INT low, high;

      if (GET_CODE (x) == CONST_INT)
	{
	  low = INTVAL (x);
	  high = low >> (HOST_BITS_PER_WIDE_INT - 1);
	}
      else
	{
	  low = CONST_DOUBLE_LOW (x);
	  high = CONST_DOUBLE_HIGH (x);
	}

#if HOST_BITS_PER_WIDE_INT == 32
      /* REAL_VALUE_TARGET_DOUBLE takes the addressing order of the
	 target machine.  */
      if (WORDS_BIG_ENDIAN)
	i[0] = high, i[1] = low;
      else
	i[0] = low, i[1] = high;
#else
      i[0] = low;
#endif

      r = REAL_VALUE_FROM_TARGET_DOUBLE (i);
      return CONST_DOUBLE_FROM_REAL_VALUE (r, mode);
    }
  else if ((GET_MODE_CLASS (mode) == MODE_INT
	    || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
	   && GET_CODE (x) == CONST_DOUBLE
	   && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
    {
      REAL_VALUE_TYPE r;
      long i[4];  /* Only the low 32 bits of each 'long' are used.  */
      int endian = WORDS_BIG_ENDIAN ? 1 : 0;

      /* Convert 'r' into an array of four 32-bit words in target word
	 order.  */
      REAL_VALUE_FROM_CONST_DOUBLE (r, x);
      switch (GET_MODE_BITSIZE (GET_MODE (x)))
	{
	case 32:
	  REAL_VALUE_TO_TARGET_SINGLE (r, i[3 * endian]);
	  i[1] = 0;
	  i[2] = 0;
	  i[3 - 3 * endian] = 0;
	  break;
	case 64:
	  REAL_VALUE_TO_TARGET_DOUBLE (r, i + 2 * endian);
	  i[2 - 2 * endian] = 0;
	  i[3 - 2 * endian] = 0;
	  break;
	case 96:
	  REAL_VALUE_TO_TARGET_LONG_DOUBLE (r, i + endian);
	  i[3 - 3 * endian] = 0;
	  break;
	case 128:
	  REAL_VALUE_TO_TARGET_LONG_DOUBLE (r, i);
	  break;
	default:
	  abort ();
	}

      /* Now, pack the 32-bit elements of the array into a CONST_DOUBLE
	 and return it.  */
#if HOST_BITS_PER_WIDE_INT == 32
      return immed_double_const (i[3 * endian], i[1 + endian], mode);
#else
      if (HOST_BITS_PER_WIDE_INT != 64)
	abort ();

      return immed_double_const ((((unsigned long) i[3 * endian])
				  | ((HOST_WIDE_INT) i[1 + endian] << 32)),
				 (((unsigned long) i[2 - endian])
				  | ((HOST_WIDE_INT) i[3 - 3 * endian] << 32)),
				 mode);
#endif
    }

  /* Otherwise, we can't do this.  */
  return 0;
}
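
/* Worked example (not in the original file): taking the QImode lowpart
   of a VOIDmode integer constant extracts and sign-extends the low
   byte:

       rtx q = gen_lowpart_common (QImode, GEN_INT (0x1234));
       -- 0x1234 truncated to QImode is 0x34, so q is (const_int 52).

   For a REG, SUBREG or CONCAT input the same call would instead go
   through simplify_gen_subreg.  */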
/* Return the real part (which has mode MODE) of a complex value X.
   This always comes at the low address in memory.  */

rtx
gen_realpart (mode, x)
     enum machine_mode mode;
     rtx x;
{
  if (WORDS_BIG_ENDIAN
      && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
      && REG_P (x)
      && REGNO (x) < FIRST_PSEUDO_REGISTER)
    internal_error
      ("can't access real part of complex value in hard register");
  else if (WORDS_BIG_ENDIAN)
    return gen_highpart (mode, x);
  else
    return gen_lowpart (mode, x);
}

/* Return the imaginary part (which has mode MODE) of a complex value X.
   This always comes at the high address in memory.  */

rtx
gen_imagpart (mode, x)
     enum machine_mode mode;
     rtx x;
{
  if (WORDS_BIG_ENDIAN)
    return gen_lowpart (mode, x);
  else if (! WORDS_BIG_ENDIAN
	   && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
	   && REG_P (x)
	   && REGNO (x) < FIRST_PSEUDO_REGISTER)
    internal_error
      ("can't access imaginary part of complex value in hard register");
  else
    return gen_highpart (mode, x);
}
/* Return 1 iff X, assumed to be a SUBREG,
   refers to the real part of the complex value in its containing reg.
   Complex values are always stored with the real part in the first word,
   regardless of WORDS_BIG_ENDIAN.  */

int
subreg_realpart_p (x)
     rtx x;
{
  if (GET_CODE (x) != SUBREG)
    abort ();

  return ((unsigned int) SUBREG_BYTE (x)
	  < GET_MODE_UNIT_SIZE (GET_MODE (SUBREG_REG (x))));
}
/* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a value,
   return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
   least-significant part of X.
   MODE specifies how big a part of X to return;
   it usually should not be larger than a word.
   If X is a MEM whose address is a QUEUED, the value may be so also.  */

rtx
gen_lowpart (mode, x)
     enum machine_mode mode;
     rtx x;
{
  rtx result = gen_lowpart_common (mode, x);

  if (result)
    return result;
  else if (GET_CODE (x) == REG)
    {
      /* Must be a hard reg that's not valid in MODE.  */
      result = gen_lowpart_common (mode, copy_to_reg (x));
      if (result == 0)
	abort ();
      return result;
    }
  else if (GET_CODE (x) == MEM)
    {
      /* The only additional case we can do is MEM.  */
      int offset = 0;
      if (WORDS_BIG_ENDIAN)
	offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
		  - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));

      if (BYTES_BIG_ENDIAN)
	/* Adjust the address so that the address-after-the-data
	   is unchanged.  */
	offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
		   - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));

      return adjust_address (x, mode, offset);
    }
  else if (GET_CODE (x) == ADDRESSOF)
    return gen_lowpart (mode, force_reg (GET_MODE (x), x));
  else
    abort ();
}
/* Like `gen_lowpart', but refer to the most significant part.
   This is used to access the imaginary part of a complex number.  */

rtx
gen_highpart (mode, x)
     enum machine_mode mode;
     rtx x;
{
  unsigned int msize = GET_MODE_SIZE (mode);
  rtx result;

  /* This case loses if X is a subreg.  To catch bugs early,
     complain if an invalid MODE is used even in other cases.  */
  if (msize > UNITS_PER_WORD
      && msize != GET_MODE_UNIT_SIZE (GET_MODE (x)))
    abort ();

  result = simplify_gen_subreg (mode, x, GET_MODE (x),
				subreg_highpart_offset (mode, GET_MODE (x)));

  /* simplify_gen_subreg is not guaranteed to return a valid operand for
     the target if we have a MEM.  gen_highpart must return a valid operand,
     emitting code if necessary to do so.  */
  if (result != NULL_RTX && GET_CODE (result) == MEM)
    result = validize_mem (result);

  if (!result)
    abort ();
  return result;
}
/* Like gen_highpart, but accept mode of EXP operand in case EXP can
   be a VOIDmode constant.  */
rtx
gen_highpart_mode (outermode, innermode, exp)
     enum machine_mode outermode, innermode;
     rtx exp;
{
  if (GET_MODE (exp) != VOIDmode)
    {
      if (GET_MODE (exp) != innermode)
	abort ();
      return gen_highpart (outermode, exp);
    }
  return simplify_gen_subreg (outermode, exp, innermode,
			      subreg_highpart_offset (outermode, innermode));
}
/* Return offset in bytes to get OUTERMODE low part
   of the value in mode INNERMODE stored in memory in target format.  */

unsigned int
subreg_lowpart_offset (outermode, innermode)
     enum machine_mode outermode, innermode;
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  return offset;
}
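
/* Example (illustrative, assuming 4-byte words): for INNERMODE DImode
   and OUTERMODE SImode, difference == 4.  On a target that is both
   word- and byte-big-endian the low part is the second word, so the
   offset is 4; on a little-endian target both adjustments are skipped
   and the offset is 0.  */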
/* Return offset in bytes to get OUTERMODE high part
   of the value in mode INNERMODE stored in memory in target format.  */
unsigned int
subreg_highpart_offset (outermode, innermode)
     enum machine_mode outermode, innermode;
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  if (GET_MODE_SIZE (innermode) < GET_MODE_SIZE (outermode))
    abort ();

  if (difference > 0)
    {
      if (! WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (! BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  return offset;
}
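
/* Example (illustrative): the mirror of the lowpart case above.  For
   DImode inner and SImode outer with 4-byte words, the high part sits
   at offset 4 on a little-endian target and offset 0 on a big-endian
   one.  When the size difference is a whole number of words, the
   lowpart and highpart offsets sum to that difference.  */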
/* Return 1 iff X, assumed to be a SUBREG,
   refers to the least significant part of its containing reg.
   If X is not a SUBREG, always return 1 (it is its own low part!).  */

int
subreg_lowpart_p (x)
     rtx x;
{
  if (GET_CODE (x) != SUBREG)
    return 1;
  else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
    return 0;

  return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
	  == SUBREG_BYTE (x));
}
/* Helper routine for all the constant cases of operand_subword.
   Some places invoke this directly.  */

rtx
constant_subword (op, offset, mode)
     rtx op;
     int offset;
     enum machine_mode mode;
{
  int size_ratio = HOST_BITS_PER_WIDE_INT / BITS_PER_WORD;
  HOST_WIDE_INT val;

  /* If OP is already an integer word, return it.  */
  if (GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_SIZE (mode) == UNITS_PER_WORD)
    return op;

  /* The output is some bits, the width of the target machine's word.
     A wider-word host can surely hold them in a CONST_INT.  A narrower-word
     host can't.  */
  if (HOST_BITS_PER_WIDE_INT >= BITS_PER_WORD
      && GET_MODE_CLASS (mode) == MODE_FLOAT
      && GET_MODE_BITSIZE (mode) == 64
      && GET_CODE (op) == CONST_DOUBLE)
    {
      long k[2];
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_DOUBLE (rv, k);

      /* We handle 32-bit and >= 64-bit words here.  Note that the order in
	 which the words are written depends on the word endianness.
	 ??? This is a potential portability problem and should
	 be fixed at some point.

	 We must exercise caution with the sign bit.  By definition there
	 are 32 significant bits in K; there may be more in a HOST_WIDE_INT.
	 Consider a host with a 32-bit long and a 64-bit HOST_WIDE_INT.
	 So we explicitly mask and sign-extend as necessary.  */
      if (BITS_PER_WORD == 32)
	{
	  val = k[offset];
	  val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;
	  return GEN_INT (val);
	}
#if HOST_BITS_PER_WIDE_INT >= 64
      else if (BITS_PER_WORD >= 64 && offset == 0)
	{
	  val = k[! WORDS_BIG_ENDIAN];
	  val = (((val & 0xffffffff) ^ 0x80000000) - 0x80000000) << 32;
	  val |= (HOST_WIDE_INT) k[WORDS_BIG_ENDIAN] & 0xffffffff;
	  return GEN_INT (val);
	}
#endif
      else if (BITS_PER_WORD == 16)
	{
	  val = k[offset >> 1];
	  if ((offset & 1) == ! WORDS_BIG_ENDIAN)
	    val >>= 16;
	  val = ((val & 0xffff) ^ 0x8000) - 0x8000;
	  return GEN_INT (val);
	}
      else
	abort ();
    }
  else if (HOST_BITS_PER_WIDE_INT >= BITS_PER_WORD
	   && GET_MODE_CLASS (mode) == MODE_FLOAT
	   && GET_MODE_BITSIZE (mode) > 64
	   && GET_CODE (op) == CONST_DOUBLE)
    {
      long k[4];
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);

      if (BITS_PER_WORD == 32)
	{
	  val = k[offset];
	  val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;
	  return GEN_INT (val);
	}
#if HOST_BITS_PER_WIDE_INT >= 64
      else if (BITS_PER_WORD >= 64 && offset <= 1)
	{
	  val = k[offset * 2 + ! WORDS_BIG_ENDIAN];
	  val = (((val & 0xffffffff) ^ 0x80000000) - 0x80000000) << 32;
	  val |= (HOST_WIDE_INT) k[offset * 2 + WORDS_BIG_ENDIAN] & 0xffffffff;
	  return GEN_INT (val);
	}
#endif
      else
	abort ();
    }

  /* Single word float is a little harder, since single- and double-word
     values often do not have the same high-order bits.  We have already
     verified that we want the only defined word of the single-word value.  */
  if (GET_MODE_CLASS (mode) == MODE_FLOAT
      && GET_MODE_BITSIZE (mode) == 32
      && GET_CODE (op) == CONST_DOUBLE)
    {
      long l;
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_SINGLE (rv, l);

      /* Sign extend from known 32-bit value to HOST_WIDE_INT.  */
      val = l;
      val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;

      if (BITS_PER_WORD == 16)
	{
	  if ((offset & 1) == ! WORDS_BIG_ENDIAN)
	    val >>= 16;
	  val = ((val & 0xffff) ^ 0x8000) - 0x8000;
	}

      return GEN_INT (val);
    }

  /* The only remaining cases that we can handle are integers.
     Convert to proper endianness now since these cases need it.
     At this point, offset == 0 means the low-order word.

     We do not want to handle the case when BITS_PER_WORD <= HOST_BITS_PER_INT
     in general.  However, if OP is (const_int 0), we can just return
     it for any word.  */

  if (op == const0_rtx)
    return op;

  if (GET_MODE_CLASS (mode) != MODE_INT
      || (GET_CODE (op) != CONST_INT && GET_CODE (op) != CONST_DOUBLE)
      || BITS_PER_WORD > HOST_BITS_PER_WIDE_INT)
    return 0;

  if (WORDS_BIG_ENDIAN)
    offset = GET_MODE_SIZE (mode) / UNITS_PER_WORD - 1 - offset;

  /* Find out which word on the host machine this value is in and get
     it from the constant.  */
  val = (offset / size_ratio == 0
	 ? (GET_CODE (op) == CONST_INT ? INTVAL (op) : CONST_DOUBLE_LOW (op))
	 : (GET_CODE (op) == CONST_INT
	    ? (INTVAL (op) < 0 ? ~0 : 0) : CONST_DOUBLE_HIGH (op)));

  /* Get the value we want into the low bits of val.  */
  if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT)
    val = ((val >> ((offset % size_ratio) * BITS_PER_WORD)));

  val = trunc_int_for_mode (val, word_mode);

  return GEN_INT (val);
}
/* Return subword OFFSET of operand OP.
   The word number, OFFSET, is interpreted as the word number starting
   at the low-order address.  OFFSET 0 is the low-order word if not
   WORDS_BIG_ENDIAN, otherwise it is the high-order word.

   If we cannot extract the required word, we return zero.  Otherwise,
   an rtx corresponding to the requested word will be returned.

   VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
   reload has completed, a valid address will always be returned.  After
   reload, if a valid address cannot be returned, we return zero.

   If VALIDATE_ADDRESS is zero, we simply form the required address; validating
   it is the responsibility of the caller.

   MODE is the mode of OP in case it is a CONST_INT.

   ??? This is still rather broken for some cases.  The problem for the
   moment is that all callers of this thing provide no 'goal mode' to
   tell us to work with.  This exists because all callers were written
   in a word based SUBREG world.
   Now use of this function can be deprecated by simplify_subreg in most
   cases.  */

rtx
operand_subword (op, offset, validate_address, mode)
     rtx op;
     unsigned int offset;
     int validate_address;
     enum machine_mode mode;
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  if (mode == VOIDmode)
    abort ();

  /* If OP is narrower than a word, fail.  */
  if (mode != BLKmode
      && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
    return 0;

  /* If we want a word outside OP, return zero.  */
  if (mode != BLKmode
      && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
    return const0_rtx;

  /* Form a new MEM at the requested address.  */
  if (GET_CODE (op) == MEM)
    {
      rtx new = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);

      if (! validate_address)
	return new;

      else if (reload_completed)
	{
	  if (! strict_memory_address_p (word_mode, XEXP (new, 0)))
	    return 0;
	}
      else
	return replace_equiv_address (new, XEXP (new, 0));
    }

  /* Rest can be handled by simplify_subreg.  */
  return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
}
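
/* Usage sketch (illustrative; DREG stands for some DImode pseudo):
   extracting word 1 on a 32-bit target goes through the
   simplify_subreg path:

       rtx w = operand_subword (dreg, 1, 1, DImode);
       -- w is (subreg:SI (reg:DI ...) 4), the second word.

   For a MEM operand the result is instead a new MEM at the adjusted
   address, validated when VALIDATE_ADDRESS is nonzero.  */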
/* Similar to `operand_subword', but never return 0.  If we can't extract
   the required subword, put OP into a register and try again.  If that fails,
   abort.  We always validate the address in this case.

   MODE is the mode of OP, in case it is CONST_INT.  */

rtx
operand_subword_force (op, offset, mode)
     rtx op;
     unsigned int offset;
     enum machine_mode mode;
{
  rtx result = operand_subword (op, offset, 1, mode);

  if (result)
    return result;

  if (mode != BLKmode && mode != VOIDmode)
    {
      /* If this is a register which can not be accessed by words, copy it
	 to a pseudo register.  */
      if (GET_CODE (op) == REG)
	op = copy_to_reg (op);
      else
	op = force_reg (mode, op);
    }

  result = operand_subword (op, offset, 1, mode);
  if (result == 0)
    abort ();

  return result;
}
/* Given a compare instruction, swap the operands.
   A test instruction is changed into a compare of 0 against the operand.  */

void
reverse_comparison (insn)
     rtx insn;
{
  rtx body = PATTERN (insn);
  rtx comp;

  if (GET_CODE (body) == SET)
    comp = SET_SRC (body);
  else
    comp = SET_SRC (XVECEXP (body, 0, 0));

  if (GET_CODE (comp) == COMPARE)
    {
      rtx op0 = XEXP (comp, 0);
      rtx op1 = XEXP (comp, 1);
      XEXP (comp, 0) = op1;
      XEXP (comp, 1) = op0;
    }
  else
    {
      rtx new = gen_rtx_COMPARE (VOIDmode,
				 CONST0_RTX (GET_MODE (comp)), comp);
      if (GET_CODE (body) == SET)
	SET_SRC (body) = new;
      else
	SET_SRC (XVECEXP (body, 0, 0)) = new;
    }
}
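
/* Example (illustrative): given an insn whose pattern is
   (set (cc0) (compare (reg:SI 1) (reg:SI 2))), reverse_comparison
   swaps the operands in place, giving
   (set (cc0) (compare (reg:SI 2) (reg:SI 1))).  A test-style pattern
   (set (cc0) (reg:SI 1)) instead becomes
   (set (cc0) (compare (const_int 0) (reg:SI 1))).  */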
/* Within a MEM_EXPR, we care about either (1) a component ref of a decl,
   or (2) a component ref of something variable.  Represent the latter with
   a NULL expression.  */

static tree
component_ref_for_mem_expr (ref)
     tree ref;
{
  tree inner = TREE_OPERAND (ref, 0);

  if (TREE_CODE (inner) == COMPONENT_REF)
    inner = component_ref_for_mem_expr (inner);
  else
    {
      tree placeholder_ptr = 0;

      /* Now remove any conversions: they don't change what the underlying
	 object is.  Likewise for SAVE_EXPR.  Also handle PLACEHOLDER_EXPR.  */
      while (TREE_CODE (inner) == NOP_EXPR || TREE_CODE (inner) == CONVERT_EXPR
	     || TREE_CODE (inner) == NON_LVALUE_EXPR
	     || TREE_CODE (inner) == VIEW_CONVERT_EXPR
	     || TREE_CODE (inner) == SAVE_EXPR
	     || TREE_CODE (inner) == PLACEHOLDER_EXPR)
	if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
	  inner = find_placeholder (inner, &placeholder_ptr);
	else
	  inner = TREE_OPERAND (inner, 0);

      if (! DECL_P (inner))
	inner = NULL_TREE;
    }

  if (inner == TREE_OPERAND (ref, 0))
    return ref;
  else
    return build (COMPONENT_REF, TREE_TYPE (ref), inner,
		  TREE_OPERAND (ref, 1));
}
/* Given REF, a MEM, and T, either the type of X or the expression
   corresponding to REF, set the memory attributes.  OBJECTP is nonzero
   if we are making a new object of this type.  */

void
set_mem_attributes (ref, t, objectp)
     rtx ref;
     tree t;
     int objectp;
{
  HOST_WIDE_INT alias = MEM_ALIAS_SET (ref);
  tree expr = MEM_EXPR (ref);
  rtx offset = MEM_OFFSET (ref);
  rtx size = MEM_SIZE (ref);
  unsigned int align = MEM_ALIGN (ref);
  tree type;

  /* It can happen that type_for_mode was given a mode for which there
     is no language-level type.  In which case it returns NULL, which
     we can see here.  */
  if (t == NULL_TREE)
    return;

  type = TYPE_P (t) ? t : TREE_TYPE (t);

  /* If we have already set DECL_RTL = ref, get_alias_set will get the
     wrong answer, as it assumes that DECL_RTL already has the right alias
     info.  Callers should not set DECL_RTL until after the call to
     set_mem_attributes.  */
  if (DECL_P (t) && ref == DECL_RTL_IF_SET (t))
    abort ();

  /* Get the alias set from the expression or type (perhaps using a
     front-end routine) and use it.  */
  alias = get_alias_set (t);

  MEM_VOLATILE_P (ref) = TYPE_VOLATILE (type);
  MEM_IN_STRUCT_P (ref) = AGGREGATE_TYPE_P (type);
  RTX_UNCHANGING_P (ref)
    |= ((lang_hooks.honor_readonly
	 && (TYPE_READONLY (type) || TREE_READONLY (t)))
	|| (! TYPE_P (t) && TREE_CONSTANT (t)));

  /* If we are making an object of this type, or if this is a DECL, we know
     that it is a scalar if the type is not an aggregate.  */
  if ((objectp || DECL_P (t)) && ! AGGREGATE_TYPE_P (type))
    MEM_SCALAR_P (ref) = 1;

  /* We can set the alignment from the type if we are making an object,
     this is an INDIRECT_REF, or if TYPE_ALIGN_OK.  */
  if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
    align = MAX (align, TYPE_ALIGN (type));

  /* If the size is known, we can set that.  */
  if (TYPE_SIZE_UNIT (type) && host_integerp (TYPE_SIZE_UNIT (type), 1))
    size = GEN_INT (tree_low_cst (TYPE_SIZE_UNIT (type), 1));

  /* If T is not a type, we may be able to deduce some more information about
     the expression.  */
  if (! TYPE_P (t))
    {
      maybe_set_unchanging (ref, t);
      if (TREE_THIS_VOLATILE (t))
	MEM_VOLATILE_P (ref) = 1;

      /* Now remove any conversions: they don't change what the underlying
	 object is.  Likewise for SAVE_EXPR.  */
      while (TREE_CODE (t) == NOP_EXPR || TREE_CODE (t) == CONVERT_EXPR
	     || TREE_CODE (t) == NON_LVALUE_EXPR
	     || TREE_CODE (t) == VIEW_CONVERT_EXPR
	     || TREE_CODE (t) == SAVE_EXPR)
	t = TREE_OPERAND (t, 0);

      /* If this expression can't be addressed (e.g., it contains a reference
	 to a non-addressable field), show we don't change its alias set.  */
      if (! can_address_p (t))
	MEM_KEEP_ALIAS_SET_P (ref) = 1;

      /* If this is a decl, set the attributes of the MEM from it.  */
      if (DECL_P (t))
	{
	  expr = t;
	  offset = const0_rtx;
	  size = (DECL_SIZE_UNIT (t)
		  && host_integerp (DECL_SIZE_UNIT (t), 1)
		  ? GEN_INT (tree_low_cst (DECL_SIZE_UNIT (t), 1)) : 0);
	  align = DECL_ALIGN (t);
	}

      /* If this is a constant, we know the alignment.  */
      else if (TREE_CODE_CLASS (TREE_CODE (t)) == 'c')
	{
	  align = TYPE_ALIGN (type);
#ifdef CONSTANT_ALIGNMENT
	  align = CONSTANT_ALIGNMENT (t, align);
#endif
	}

      /* If this is a field reference and not a bit-field, record it.  */
      /* ??? There is some information that can be gleaned from bit-fields,
	 such as the word offset in the structure that might be modified.
	 But skip it for now.  */
      else if (TREE_CODE (t) == COMPONENT_REF
	       && ! DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
	{
	  expr = component_ref_for_mem_expr (t);
	  offset = const0_rtx;
	  /* ??? Any reason the field size would be different than
	     the size we got from the type?  */
	}

      /* If this is an array reference, look for an outer field reference.  */
      else if (TREE_CODE (t) == ARRAY_REF)
	{
	  tree off_tree = size_zero_node;

	  do
	    {
	      off_tree
		= fold (build (PLUS_EXPR, sizetype,
			       fold (build (MULT_EXPR, sizetype,
					    TREE_OPERAND (t, 1),
					    TYPE_SIZE_UNIT (TREE_TYPE (t)))),
			       off_tree));
	      t = TREE_OPERAND (t, 0);
	    }
	  while (TREE_CODE (t) == ARRAY_REF);

	  if (TREE_CODE (t) == COMPONENT_REF)
	    {
	      expr = component_ref_for_mem_expr (t);
	      if (host_integerp (off_tree, 1))
		offset = GEN_INT (tree_low_cst (off_tree, 1));
	      /* ??? Any reason the field size would be different than
		 the size we got from the type?  */
	    }
	}
    }

  /* Now set the attributes we computed above.  */
  MEM_ATTRS (ref)
    = get_mem_attrs (alias, expr, offset, size, align, GET_MODE (ref));

  /* If this is already known to be a scalar or aggregate, we are done.  */
  if (MEM_IN_STRUCT_P (ref) || MEM_SCALAR_P (ref))
    return;

  /* If it is a reference into an aggregate, this is part of an aggregate.
     Otherwise we don't know.  */
  else if (TREE_CODE (t) == COMPONENT_REF || TREE_CODE (t) == ARRAY_REF
	   || TREE_CODE (t) == ARRAY_RANGE_REF
	   || TREE_CODE (t) == BIT_FIELD_REF)
    MEM_IN_STRUCT_P (ref) = 1;
}
/* Set the alias set of MEM to SET.  */

void
set_mem_alias_set (mem, set)
     rtx mem;
     HOST_WIDE_INT set;
{
#ifdef ENABLE_CHECKING
  /* If the new and old alias sets don't conflict, something is wrong.  */
  if (!alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)))
    abort ();
#endif

  MEM_ATTRS (mem) = get_mem_attrs (set, MEM_EXPR (mem), MEM_OFFSET (mem),
				   MEM_SIZE (mem), MEM_ALIGN (mem),
				   GET_MODE (mem));
}

/* Set the alignment of MEM to ALIGN bits.  */

void
set_mem_align (mem, align)
     rtx mem;
     unsigned int align;
{
  MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
				   MEM_OFFSET (mem), MEM_SIZE (mem), align,
				   GET_MODE (mem));
}

/* Set the expr for MEM to EXPR.  */

void
set_mem_expr (mem, expr)
     rtx mem;
     tree expr;
{
  MEM_ATTRS (mem)
    = get_mem_attrs (MEM_ALIAS_SET (mem), expr, MEM_OFFSET (mem),
		     MEM_SIZE (mem), MEM_ALIGN (mem), GET_MODE (mem));
}

/* Set the offset of MEM to OFFSET.  */

void
set_mem_offset (mem, offset)
     rtx mem, offset;
{
  MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
				   offset, MEM_SIZE (mem), MEM_ALIGN (mem),
				   GET_MODE (mem));
}
/* Return a memory reference like MEMREF, but with its mode changed to MODE
   and its address changed to ADDR.  (VOIDmode means don't change the mode.
   NULL for ADDR means don't change the address.)  VALIDATE is nonzero if the
   returned memory location is required to be valid.  The memory
   attributes are not changed.  */

static rtx
change_address_1 (memref, mode, addr, validate)
     rtx memref;
     enum machine_mode mode;
     rtx addr;
     int validate;
{
  rtx new;

  if (GET_CODE (memref) != MEM)
    abort ();
  if (mode == VOIDmode)
    mode = GET_MODE (memref);
  if (addr == 0)
    addr = XEXP (memref, 0);

  if (validate)
    {
      if (reload_in_progress || reload_completed)
	{
	  if (! memory_address_p (mode, addr))
	    abort ();
	}
      else
	addr = memory_address (mode, addr);
    }

  if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
    return memref;

  new = gen_rtx_MEM (mode, addr);
  MEM_COPY_ATTRIBUTES (new, memref);
  return new;
}
/* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
   way we are changing MEMREF, so we only preserve the alias set.  */

rtx
change_address (memref, mode, addr)
     rtx memref;
     enum machine_mode mode;
     rtx addr;
{
  rtx new = change_address_1 (memref, mode, addr, 1);
  enum machine_mode mmode = GET_MODE (new);

  MEM_ATTRS (new)
    = get_mem_attrs (MEM_ALIAS_SET (memref), 0, 0,
		     mmode == BLKmode ? 0 : GEN_INT (GET_MODE_SIZE (mmode)),
		     (mmode == BLKmode ? BITS_PER_UNIT
		      : GET_MODE_ALIGNMENT (mmode)),
		     mmode);

  return new;
}
/* Return a memory reference like MEMREF, but with its mode changed
   to MODE and its address offset by OFFSET bytes.  If VALIDATE is
   nonzero, the memory address is forced to be valid.
   If ADJUST is zero, OFFSET is only used to update MEM_ATTRS
   and the caller is responsible for adjusting the MEMREF base register.  */

rtx
adjust_address_1 (memref, mode, offset, validate, adjust)
     rtx memref;
     enum machine_mode mode;
     HOST_WIDE_INT offset;
     int validate, adjust;
{
  rtx addr = XEXP (memref, 0);
  rtx new;
  rtx memoffset = MEM_OFFSET (memref);
  rtx size = 0;
  unsigned int memalign = MEM_ALIGN (memref);

  /* ??? Prefer to create garbage instead of creating shared rtl.
     This may happen even if offset is non-zero -- consider
     (plus (plus reg reg) const_int) -- so do this always.  */
  addr = copy_rtx (addr);

  if (adjust)
    {
      /* If MEMREF is a LO_SUM and the offset is within the alignment of the
	 object, we can merge it into the LO_SUM.  */
      if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
	  && offset >= 0
	  && (unsigned HOST_WIDE_INT) offset
	     < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
	addr = gen_rtx_LO_SUM (Pmode, XEXP (addr, 0),
			       plus_constant (XEXP (addr, 1), offset));
      else
	addr = plus_constant (addr, offset);
    }

  new = change_address_1 (memref, mode, addr, validate);

  /* Compute the new values of the memory attributes due to this adjustment.
     We add the offsets and update the alignment.  */
  if (memoffset)
    memoffset = GEN_INT (offset + INTVAL (memoffset));

  /* Compute the new alignment by taking the MIN of the alignment and the
     lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
     is zero.  */
  if (offset != 0)
    memalign
      = MIN (memalign,
	     (unsigned HOST_WIDE_INT) (offset & -offset) * BITS_PER_UNIT);

  /* We can compute the size in a number of ways.  */
  if (GET_MODE (new) != BLKmode)
    size = GEN_INT (GET_MODE_SIZE (GET_MODE (new)));
  else if (MEM_SIZE (memref))
    size = plus_constant (MEM_SIZE (memref), -offset);

  MEM_ATTRS (new) = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref),
				   memoffset, size, memalign, GET_MODE (new));

  /* At some point, we should validate that this offset is within the object,
     if all the appropriate values are known.  */
  return new;
}
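
/* Worked example (not in the original source): adjusting a 64-bit
   aligned SImode MEM by 2 bytes to HImode

       rtx m2 = adjust_address (mem, HImode, 2);

   adds 2 to the address and to MEM_OFFSET, and drops MEM_ALIGN from
   64 to (2 & -2) * BITS_PER_UNIT == 16 bits, the largest alignment
   the new offset still guarantees.  */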
/* Return a memory reference like MEMREF, but with its mode changed
   to MODE and its address changed to ADDR, which is assumed to be
   MEMREF offset by OFFSET bytes.  If VALIDATE is
   nonzero, the memory address is forced to be valid.  */

rtx
adjust_automodify_address_1 (memref, mode, addr, offset, validate)
     rtx memref;
     enum machine_mode mode;
     rtx addr;
     HOST_WIDE_INT offset;
     int validate;
{
  memref = change_address_1 (memref, VOIDmode, addr, validate);
  return adjust_address_1 (memref, mode, offset, validate, 0);
}
1904 /* Return a memory reference like MEMREF, but whose address is changed by
1905 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
1906 known to be in OFFSET (possibly 1). */
1909 offset_address (memref, offset, pow2)
1910 rtx memref;
1911 rtx offset;
1912 HOST_WIDE_INT pow2;
1914 rtx new, addr = XEXP (memref, 0);
1916 new = simplify_gen_binary (PLUS, Pmode, addr, offset);
1918 /* At this point we don't know _why_ the address is invalid. It
1919 could have secondary memory refereces, multiplies or anything.
1921 However, if we did go and rearrange things, we can wind up not
1922 being able to recognize the magic around pic_offset_table_rtx.
1923 This stuff is fragile, and is yet another example of why it is
1924 bad to expose PIC machinery too early. */
1925 if (! memory_address_p (GET_MODE (memref), new)
1926 && GET_CODE (addr) == PLUS
1927 && XEXP (addr, 0) == pic_offset_table_rtx)
1929 addr = force_reg (GET_MODE (addr), addr);
1930 new = simplify_gen_binary (PLUS, Pmode, addr, offset);
1933 update_temp_slot_address (XEXP (memref, 0), new);
1934 new = change_address_1 (memref, VOIDmode, new, 1);
1936 /* Update the alignment to reflect the offset. Reset the offset, which
1937 we don't know. */
1938 MEM_ATTRS (new)
1939 = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref), 0, 0,
1940 MIN (MEM_ALIGN (memref),
1941 (unsigned HOST_WIDE_INT) pow2 * BITS_PER_UNIT),
1942 GET_MODE (new));
1943 return new;
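/* Illustrative sketch (the example_ helper is hypothetical): address
   one word of an array whose scaled index SCALED_IDX is known to be a
   multiple of UNITS_PER_WORD, which is therefore passed as POW2.  */
#if 0
static rtx
example_index_word (memref, scaled_idx)
     rtx memref, scaled_idx;
{
  return offset_address (memref, scaled_idx, UNITS_PER_WORD);
}
#endif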
1946 /* Return a memory reference like MEMREF, but with its address changed to
1947 ADDR. The caller is asserting that the actual piece of memory pointed
1948 to is the same, just the form of the address is being changed, such as
1949 by putting something into a register. */
1952 replace_equiv_address (memref, addr)
1953 rtx memref;
1954 rtx addr;
1956 /* change_address_1 copies the memory attribute structure without change
1957 and that's exactly what we want here. */
1958 update_temp_slot_address (XEXP (memref, 0), addr);
1959 return change_address_1 (memref, VOIDmode, addr, 1);
1962 /* Likewise, but the reference is not required to be valid. */
1965 replace_equiv_address_nv (memref, addr)
1966 rtx memref;
1967 rtx addr;
1969 return change_address_1 (memref, VOIDmode, addr, 0);
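/* Sketch, illustrative only (the example_ helper is hypothetical):
   legitimize MEMREF's address by copying it into a pseudo.  The memory
   referenced is unchanged, so replace_equiv_address keeps the memory
   attributes intact.  */
#if 0
static rtx
example_address_in_reg (memref)
     rtx memref;
{
  rtx reg = force_reg (Pmode, XEXP (memref, 0));
  return replace_equiv_address (memref, reg);
}
#endif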
1972 /* Return a memory reference like MEMREF, but with its mode widened to
1973 MODE and offset by OFFSET. This would be used by targets that e.g.
1974 cannot issue QImode memory operations and have to use SImode memory
1975 operations plus masking logic. */
1978 widen_memory_access (memref, mode, offset)
1979 rtx memref;
1980 enum machine_mode mode;
1981 HOST_WIDE_INT offset;
1983 rtx new = adjust_address_1 (memref, mode, offset, 1, 1);
1984 tree expr = MEM_EXPR (new);
1985 rtx memoffset = MEM_OFFSET (new);
1986 unsigned int size = GET_MODE_SIZE (mode);
1988 /* If we don't know what offset we were at within the expression, then
1989 we can't know if we've overstepped the bounds. */
1990 if (! memoffset && offset != 0)
1991 expr = NULL_TREE;
1993 while (expr)
1995 if (TREE_CODE (expr) == COMPONENT_REF)
1997 tree field = TREE_OPERAND (expr, 1);
1999 if (! DECL_SIZE_UNIT (field))
2001 expr = NULL_TREE;
2002 break;
2005 /* Is the field at least as large as the access? If so, ok,
2006 otherwise strip back to the containing structure. */
2007 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2008 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
2009 && INTVAL (memoffset) >= 0)
2010 break;
2012 if (! host_integerp (DECL_FIELD_OFFSET (field), 1))
2014 expr = NULL_TREE;
2015 break;
2018 expr = TREE_OPERAND (expr, 0);
2019 memoffset = (GEN_INT (INTVAL (memoffset)
2020 + tree_low_cst (DECL_FIELD_OFFSET (field), 1)
2021 + (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
2022 / BITS_PER_UNIT)));
2024 /* Similarly for the decl. */
2025 else if (DECL_P (expr)
2026 && DECL_SIZE_UNIT (expr)
2027 && compare_tree_int (DECL_SIZE_UNIT (expr), size) >= 0
2028 && (! memoffset || INTVAL (memoffset) >= 0))
2029 break;
2030 else
2032 /* The widened memory access overflows the expression, which means
2033 that it could alias another expression. Zap it. */
2034 expr = NULL_TREE;
2035 break;
2039 if (! expr)
2040 memoffset = NULL_RTX;
2042 /* The widened memory may alias other stuff, so zap the alias set. */
2043 /* ??? Maybe use get_alias_set on any remaining expression. */
2045 MEM_ATTRS (new) = get_mem_attrs (0, expr, memoffset, GEN_INT (size),
2046 MEM_ALIGN (new), mode);
2048 return new;
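/* Sketch, illustrative only (the example_ helper is hypothetical): a
   target without byte loads might widen a QImode reference to SImode
   and mask afterwards; the masking itself is left to the caller.  */
#if 0
static rtx
example_widen_byte (memref)
     rtx memref;
{
  return widen_memory_access (memref, SImode, 0);
}
#endif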
2051 /* Return a newly created CODE_LABEL rtx with a unique label number. */
2054 gen_label_rtx ()
2056 rtx label;
2058 label = gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX,
2059 NULL_RTX, label_num++, NULL, NULL);
2061 LABEL_NUSES (label) = 0;
2062 LABEL_ALTERNATE_NAME (label) = NULL;
2063 return label;
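/* Sketch, illustrative only (the example_ helper is hypothetical): the
   usual pattern is to generate a label up front, branch to it, and
   emit it later at the branch target.  */
#if 0
static void
example_label_use ()
{
  rtx label = gen_label_rtx ();

  /* ... emit a conditional branch to LABEL here ...  */
  emit_label (label);
}
#endif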
2066 /* For procedure integration. */
2068 /* Install new pointers to the first and last insns in the chain.
2069 Also, set cur_insn_uid to one higher than the last in use.
2070 Used for an inline-procedure after copying the insn chain. */
2072 void
2073 set_new_first_and_last_insn (first, last)
2074 rtx first, last;
2076 rtx insn;
2078 first_insn = first;
2079 last_insn = last;
2080 cur_insn_uid = 0;
2082 for (insn = first; insn; insn = NEXT_INSN (insn))
2083 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2085 cur_insn_uid++;
2088 /* Set the range of label numbers found in the current function.
2089 This is used when belatedly compiling an inline function. */
2091 void
2092 set_new_first_and_last_label_num (first, last)
2093 int first, last;
2095 base_label_num = label_num;
2096 first_label_num = first;
2097 last_label_num = last;
2100 /* Set the last label number found in the current function.
2101 This is used when belatedly compiling an inline function. */
2103 void
2104 set_new_last_label_num (last)
2105 int last;
2107 base_label_num = label_num;
2108 last_label_num = last;
2111 /* Restore all variables describing the current status from the structure *P.
2112 This is used after a nested function. */
2114 void
2115 restore_emit_status (p)
2116 struct function *p ATTRIBUTE_UNUSED;
2118 last_label_num = 0;
2119 clear_emit_caches ();
2122 /* Clear out all parts of the state in F that can safely be discarded
2123 after the function has been compiled, to let garbage collection
2124 reclaim the memory. */
2126 void
2127 free_emit_status (f)
2128 struct function *f;
2130 free (f->emit->x_regno_reg_rtx);
2131 free (f->emit->regno_pointer_align);
2132 free (f->emit->regno_decl);
2133 free (f->emit);
2134 f->emit = NULL;
2137 /* Go through all the RTL insn bodies and copy any invalid shared
2138 structure. This routine should only be called once. */
2140 void
2141 unshare_all_rtl (fndecl, insn)
2142 tree fndecl;
2143 rtx insn;
2145 tree decl;
2147 /* Make sure that virtual parameters are not shared. */
2148 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
2149 SET_DECL_RTL (decl, copy_rtx_if_shared (DECL_RTL (decl)));
2151 /* Make sure that virtual stack slots are not shared. */
2152 unshare_all_decls (DECL_INITIAL (fndecl));
2154 /* Unshare just about everything else. */
2155 unshare_all_rtl_1 (insn);
2157 /* Make sure the addresses of stack slots found outside the insn chain
2158 (such as in the DECL_RTL of a variable) are not shared
2159 with the insn chain.
2161 This special care is necessary when the stack slot MEM does not
2162 actually appear in the insn chain. If it does appear, its address
2163 is unshared from all else at that point. */
2164 stack_slot_list = copy_rtx_if_shared (stack_slot_list);
2167 /* Go through all the RTL insn bodies and copy any invalid shared
2168 structure, again. This is a fairly expensive thing to do so it
2169 should be done sparingly. */
2171 void
2172 unshare_all_rtl_again (insn)
2173 rtx insn;
2175 rtx p;
2176 tree decl;
2178 for (p = insn; p; p = NEXT_INSN (p))
2179 if (INSN_P (p))
2181 reset_used_flags (PATTERN (p));
2182 reset_used_flags (REG_NOTES (p));
2183 reset_used_flags (LOG_LINKS (p));
2186 /* Make sure that virtual stack slots are not shared. */
2187 reset_used_decls (DECL_INITIAL (cfun->decl));
2189 /* Make sure that virtual parameters are not shared. */
2190 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = TREE_CHAIN (decl))
2191 reset_used_flags (DECL_RTL (decl));
2193 reset_used_flags (stack_slot_list);
2195 unshare_all_rtl (cfun->decl, insn);
2198 /* Go through all the RTL insn bodies and copy any invalid shared structure.
2199 Assumes the mark bits are cleared at entry. */
2201 static void
2202 unshare_all_rtl_1 (insn)
2203 rtx insn;
2205 for (; insn; insn = NEXT_INSN (insn))
2206 if (INSN_P (insn))
2208 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2209 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2210 LOG_LINKS (insn) = copy_rtx_if_shared (LOG_LINKS (insn));
2214 /* Go through all virtual stack slots of a function and copy any
2215 shared structure. */
2216 static void
2217 unshare_all_decls (blk)
2218 tree blk;
2220 tree t;
2222 /* Copy shared decls. */
2223 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
2224 if (DECL_RTL_SET_P (t))
2225 SET_DECL_RTL (t, copy_rtx_if_shared (DECL_RTL (t)));
2227 /* Now process sub-blocks. */
2228 for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
2229 unshare_all_decls (t);
2232 /* Go through all virtual stack slots of a function and mark them as
2233 not shared. */
2234 static void
2235 reset_used_decls (blk)
2236 tree blk;
2238 tree t;
2240 /* Mark decls. */
2241 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
2242 if (DECL_RTL_SET_P (t))
2243 reset_used_flags (DECL_RTL (t));
2245 /* Now process sub-blocks. */
2246 for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
2247 reset_used_decls (t);
2250 /* Similar to `copy_rtx' except that if MAY_SHARE is present, it is
2251 placed in the result directly, rather than being copied. MAY_SHARE is
2252 either a MEM or an EXPR_LIST of MEMs. */
2255 copy_most_rtx (orig, may_share)
2256 rtx orig;
2257 rtx may_share;
2259 rtx copy;
2260 int i, j;
2261 RTX_CODE code;
2262 const char *format_ptr;
2264 if (orig == may_share
2265 || (GET_CODE (may_share) == EXPR_LIST
2266 && in_expr_list_p (may_share, orig)))
2267 return orig;
2269 code = GET_CODE (orig);
2271 switch (code)
2273 case REG:
2274 case QUEUED:
2275 case CONST_INT:
2276 case CONST_DOUBLE:
2277 case CONST_VECTOR:
2278 case SYMBOL_REF:
2279 case CODE_LABEL:
2280 case PC:
2281 case CC0:
2282 return orig;
2283 default:
2284 break;
2287 copy = rtx_alloc (code);
2288 PUT_MODE (copy, GET_MODE (orig));
2289 copy->in_struct = orig->in_struct;
2290 copy->volatil = orig->volatil;
2291 copy->unchanging = orig->unchanging;
2292 copy->integrated = orig->integrated;
2293 copy->frame_related = orig->frame_related;
2295 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
2297 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
2299 switch (*format_ptr++)
2301 case 'e':
2302 XEXP (copy, i) = XEXP (orig, i);
2303 if (XEXP (orig, i) != NULL && XEXP (orig, i) != may_share)
2304 XEXP (copy, i) = copy_most_rtx (XEXP (orig, i), may_share);
2305 break;
2307 case 'u':
2308 XEXP (copy, i) = XEXP (orig, i);
2309 break;
2311 case 'E':
2312 case 'V':
2313 XVEC (copy, i) = XVEC (orig, i);
2314 if (XVEC (orig, i) != NULL)
2316 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
2317 for (j = 0; j < XVECLEN (copy, i); j++)
2318 XVECEXP (copy, i, j)
2319 = copy_most_rtx (XVECEXP (orig, i, j), may_share);
2321 break;
2323 case 'w':
2324 XWINT (copy, i) = XWINT (orig, i);
2325 break;
2327 case 'n':
2328 case 'i':
2329 XINT (copy, i) = XINT (orig, i);
2330 break;
2332 case 't':
2333 XTREE (copy, i) = XTREE (orig, i);
2334 break;
2336 case 's':
2337 case 'S':
2338 XSTR (copy, i) = XSTR (orig, i);
2339 break;
2341 case '0':
2342 /* Copy this through the wide int field; that's safest. */
2343 X0WINT (copy, i) = X0WINT (orig, i);
2344 break;
2346 default:
2347 abort ();
2350 return copy;
2353 /* Mark ORIG as in use, and return a copy of it if it was already in use.
2354 Recursively does the same for subexpressions. */
2357 copy_rtx_if_shared (orig)
2358 rtx orig;
2360 rtx x = orig;
2361 int i;
2362 enum rtx_code code;
2363 const char *format_ptr;
2364 int copied = 0;
2366 if (x == 0)
2367 return 0;
2369 code = GET_CODE (x);
2371 /* These types may be freely shared. */
2373 switch (code)
2375 case REG:
2376 case QUEUED:
2377 case CONST_INT:
2378 case CONST_DOUBLE:
2379 case CONST_VECTOR:
2380 case SYMBOL_REF:
2381 case CODE_LABEL:
2382 case PC:
2383 case CC0:
2384 case SCRATCH:
2385 /* SCRATCH rtxs must be shared because each one represents a distinct value. */
2386 return x;
2388 case CONST:
2389 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
2390 a LABEL_REF, it isn't sharable. */
2391 if (GET_CODE (XEXP (x, 0)) == PLUS
2392 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
2393 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
2394 return x;
2395 break;
2397 case INSN:
2398 case JUMP_INSN:
2399 case CALL_INSN:
2400 case NOTE:
2401 case BARRIER:
2402 /* The chain of insns is not being copied. */
2403 return x;
2405 case MEM:
2406 /* A MEM is allowed to be shared if its address is constant.
2408 We used to allow sharing of MEMs which referenced
2409 virtual_stack_vars_rtx or virtual_incoming_args_rtx, but
2410 that can lose. instantiate_virtual_regs will not unshare
2411 the MEMs, and combine may change the structure of the address
2412 because it looks safe and profitable in one context, but
2413 in some other context it creates unrecognizable RTL. */
2414 if (CONSTANT_ADDRESS_P (XEXP (x, 0)))
2415 return x;
2417 break;
2419 default:
2420 break;
2423 /* This rtx may not be shared. If it has already been seen,
2424 replace it with a copy of itself. */
2426 if (x->used)
2428 rtx copy;
2430 copy = rtx_alloc (code);
2431 memcpy (copy, x,
2432 (sizeof (*copy) - sizeof (copy->fld)
2433 + sizeof (copy->fld[0]) * GET_RTX_LENGTH (code)));
2434 x = copy;
2435 copied = 1;
2437 x->used = 1;
2439 /* Now scan the subexpressions recursively.
2440 We can store any replaced subexpressions directly into X
2441 since we know X is not shared! Any vectors in X
2442 must be copied if X was copied. */
2444 format_ptr = GET_RTX_FORMAT (code);
2446 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2448 switch (*format_ptr++)
2450 case 'e':
2451 XEXP (x, i) = copy_rtx_if_shared (XEXP (x, i));
2452 break;
2454 case 'E':
2455 if (XVEC (x, i) != NULL)
2457 int j;
2458 int len = XVECLEN (x, i);
2460 if (copied && len > 0)
2461 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
2462 for (j = 0; j < len; j++)
2463 XVECEXP (x, i, j) = copy_rtx_if_shared (XVECEXP (x, i, j));
2465 break;
2468 return x;
2471 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
2472 to look for shared sub-parts. */
2474 void
2475 reset_used_flags (x)
2476 rtx x;
2478 int i, j;
2479 enum rtx_code code;
2480 const char *format_ptr;
2482 if (x == 0)
2483 return;
2485 code = GET_CODE (x);
2487 /* These types may be freely shared so we needn't do any resetting
2488 for them. */
2490 switch (code)
2492 case REG:
2493 case QUEUED:
2494 case CONST_INT:
2495 case CONST_DOUBLE:
2496 case CONST_VECTOR:
2497 case SYMBOL_REF:
2498 case CODE_LABEL:
2499 case PC:
2500 case CC0:
2501 return;
2503 case INSN:
2504 case JUMP_INSN:
2505 case CALL_INSN:
2506 case NOTE:
2507 case LABEL_REF:
2508 case BARRIER:
2509 /* The chain of insns is not being copied. */
2510 return;
2512 default:
2513 break;
2516 x->used = 0;
2518 format_ptr = GET_RTX_FORMAT (code);
2519 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2521 switch (*format_ptr++)
2523 case 'e':
2524 reset_used_flags (XEXP (x, i));
2525 break;
2527 case 'E':
2528 for (j = 0; j < XVECLEN (x, i); j++)
2529 reset_used_flags (XVECEXP (x, i, j));
2530 break;
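/* Sketch, illustrative only (the example_ helper is hypothetical):
   unsharing a single rtx uses the same two-pass idiom as
   unshare_all_rtl_again -- clear the used bits, then copy anything
   encountered twice.  */
#if 0
static rtx
example_unshare_one (x)
     rtx x;
{
  reset_used_flags (x);
  return copy_rtx_if_shared (x);
}
#endif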
2535 /* Copy X if necessary so that it won't be altered by changes in OTHER.
2536 Return X or the rtx for the pseudo reg the value of X was copied into.
2537 OTHER must be valid as a SET_DEST. */
2540 make_safe_from (x, other)
2541 rtx x, other;
2543 while (1)
2544 switch (GET_CODE (other))
2546 case SUBREG:
2547 other = SUBREG_REG (other);
2548 break;
2549 case STRICT_LOW_PART:
2550 case SIGN_EXTEND:
2551 case ZERO_EXTEND:
2552 other = XEXP (other, 0);
2553 break;
2554 default:
2555 goto done;
2557 done:
2558 if ((GET_CODE (other) == MEM
2559 && ! CONSTANT_P (x)
2560 && GET_CODE (x) != REG
2561 && GET_CODE (x) != SUBREG)
2562 || (GET_CODE (other) == REG
2563 && (REGNO (other) < FIRST_PSEUDO_REGISTER
2564 || reg_mentioned_p (other, x))))
2566 rtx temp = gen_reg_rtx (GET_MODE (x));
2567 emit_move_insn (temp, x);
2568 return temp;
2570 return x;
2573 /* Emission of insns (adding them to the doubly-linked list). */
2575 /* Return the first insn of the current sequence or current function. */
2578 get_insns ()
2580 return first_insn;
2583 /* Specify a new insn as the first in the chain. */
2585 void
2586 set_first_insn (insn)
2587 rtx insn;
2589 if (PREV_INSN (insn) != 0)
2590 abort ();
2591 first_insn = insn;
2594 /* Return the last insn emitted in current sequence or current function. */
2597 get_last_insn ()
2599 return last_insn;
2602 /* Specify a new insn as the last in the chain. */
2604 void
2605 set_last_insn (insn)
2606 rtx insn;
2608 if (NEXT_INSN (insn) != 0)
2609 abort ();
2610 last_insn = insn;
2613 /* Return the last insn emitted, even if it is in a sequence now pushed. */
2616 get_last_insn_anywhere ()
2618 struct sequence_stack *stack;
2619 if (last_insn)
2620 return last_insn;
2621 for (stack = seq_stack; stack; stack = stack->next)
2622 if (stack->last != 0)
2623 return stack->last;
2624 return 0;
2627 /* Return a number larger than any instruction's uid in this function. */
2630 get_max_uid ()
2632 return cur_insn_uid;
2635 /* Renumber instructions so that no instruction UIDs are wasted. */
2637 void
2638 renumber_insns (stream)
2639 FILE *stream;
2641 rtx insn;
2643 /* If we're not supposed to renumber instructions, don't. */
2644 if (!flag_renumber_insns)
2645 return;
2647 /* If there aren't that many instructions, then it's not really
2648 worth renumbering them. */
2649 if (flag_renumber_insns == 1 && get_max_uid () < 25000)
2650 return;
2652 cur_insn_uid = 1;
2654 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2656 if (stream)
2657 fprintf (stream, "Renumbering insn %d to %d\n",
2658 INSN_UID (insn), cur_insn_uid);
2659 INSN_UID (insn) = cur_insn_uid++;
2663 /* Return the next insn. If it is a SEQUENCE, return the first insn
2664 of the sequence. */
2667 next_insn (insn)
2668 rtx insn;
2670 if (insn)
2672 insn = NEXT_INSN (insn);
2673 if (insn && GET_CODE (insn) == INSN
2674 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2675 insn = XVECEXP (PATTERN (insn), 0, 0);
2678 return insn;
2681 /* Return the previous insn. If it is a SEQUENCE, return the last insn
2682 of the sequence. */
2685 previous_insn (insn)
2686 rtx insn;
2688 if (insn)
2690 insn = PREV_INSN (insn);
2691 if (insn && GET_CODE (insn) == INSN
2692 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2693 insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
2696 return insn;
2699 /* Return the next insn after INSN that is not a NOTE. This routine does not
2700 look inside SEQUENCEs. */
2703 next_nonnote_insn (insn)
2704 rtx insn;
2706 while (insn)
2708 insn = NEXT_INSN (insn);
2709 if (insn == 0 || GET_CODE (insn) != NOTE)
2710 break;
2713 return insn;
2716 /* Return the previous insn before INSN that is not a NOTE. This routine does
2717 not look inside SEQUENCEs. */
2720 prev_nonnote_insn (insn)
2721 rtx insn;
2723 while (insn)
2725 insn = PREV_INSN (insn);
2726 if (insn == 0 || GET_CODE (insn) != NOTE)
2727 break;
2730 return insn;
2733 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
2734 or 0, if there is none. This routine does not look inside
2735 SEQUENCEs. */
2738 next_real_insn (insn)
2739 rtx insn;
2741 while (insn)
2743 insn = NEXT_INSN (insn);
2744 if (insn == 0 || GET_CODE (insn) == INSN
2745 || GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == JUMP_INSN)
2746 break;
2749 return insn;
2752 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
2753 or 0, if there is none. This routine does not look inside
2754 SEQUENCEs. */
2757 prev_real_insn (insn)
2758 rtx insn;
2760 while (insn)
2762 insn = PREV_INSN (insn);
2763 if (insn == 0 || GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
2764 || GET_CODE (insn) == JUMP_INSN)
2765 break;
2768 return insn;
2771 /* Find the next insn after INSN that really does something. This routine
2772 does not look inside SEQUENCEs. Until reload has completed, this is the
2773 same as next_real_insn. */
2776 active_insn_p (insn)
2777 rtx insn;
2779 return (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == JUMP_INSN
2780 || (GET_CODE (insn) == INSN
2781 && (! reload_completed
2782 || (GET_CODE (PATTERN (insn)) != USE
2783 && GET_CODE (PATTERN (insn)) != CLOBBER))));
2787 next_active_insn (insn)
2788 rtx insn;
2790 while (insn)
2792 insn = NEXT_INSN (insn);
2793 if (insn == 0 || active_insn_p (insn))
2794 break;
2797 return insn;
2800 /* Find the last insn before INSN that really does something. This routine
2801 does not look inside SEQUENCEs. Until reload has completed, this is the
2802 same as prev_real_insn. */
2805 prev_active_insn (insn)
2806 rtx insn;
2808 while (insn)
2810 insn = PREV_INSN (insn);
2811 if (insn == 0 || active_insn_p (insn))
2812 break;
2815 return insn;
2818 /* Return the next CODE_LABEL after the insn INSN, or 0 if there is none. */
2821 next_label (insn)
2822 rtx insn;
2824 while (insn)
2826 insn = NEXT_INSN (insn);
2827 if (insn == 0 || GET_CODE (insn) == CODE_LABEL)
2828 break;
2831 return insn;
2834 /* Return the last CODE_LABEL before the insn INSN, or 0 if there is none. */
2837 prev_label (insn)
2838 rtx insn;
2840 while (insn)
2842 insn = PREV_INSN (insn);
2843 if (insn == 0 || GET_CODE (insn) == CODE_LABEL)
2844 break;
2847 return insn;
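/* Sketch, illustrative only (the example_ helper is hypothetical):
   walk every active insn of the current function using the accessors
   above.  */
#if 0
static void
example_walk_active ()
{
  rtx insn;

  for (insn = get_insns (); insn; insn = next_active_insn (insn))
    if (active_insn_p (insn))
      {
	/* ... process INSN here ...  */
      }
}
#endif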
2850 #ifdef HAVE_cc0
2851 /* INSN uses CC0 and is being moved into a delay slot. Set up REG_CC_SETTER
2852 and REG_CC_USER notes so we can find it. */
2854 void
2855 link_cc0_insns (insn)
2856 rtx insn;
2858 rtx user = next_nonnote_insn (insn);
2860 if (GET_CODE (user) == INSN && GET_CODE (PATTERN (user)) == SEQUENCE)
2861 user = XVECEXP (PATTERN (user), 0, 0);
2863 REG_NOTES (user) = gen_rtx_INSN_LIST (REG_CC_SETTER, insn,
2864 REG_NOTES (user));
2865 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_CC_USER, user, REG_NOTES (insn));
2868 /* Return the next insn that uses CC0 after INSN, which is assumed to
2869 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
2870 applied to the result of this function should yield INSN).
2872 Normally, this is simply the next insn. However, if a REG_CC_USER note
2873 is present, it contains the insn that uses CC0.
2875 Return 0 if we can't find the insn. */
2878 next_cc0_user (insn)
2879 rtx insn;
2881 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
2883 if (note)
2884 return XEXP (note, 0);
2886 insn = next_nonnote_insn (insn);
2887 if (insn && GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
2888 insn = XVECEXP (PATTERN (insn), 0, 0);
2890 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
2891 return insn;
2893 return 0;
2896 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
2897 note, it is the previous insn. */
2900 prev_cc0_setter (insn)
2901 rtx insn;
2903 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
2905 if (note)
2906 return XEXP (note, 0);
2908 insn = prev_nonnote_insn (insn);
2909 if (! sets_cc0_p (PATTERN (insn)))
2910 abort ();
2912 return insn;
2914 #endif
2916 /* Increment the label uses for all labels present in rtx X. */
2918 static void
2919 mark_label_nuses (x)
2920 rtx x;
2922 enum rtx_code code;
2923 int i, j;
2924 const char *fmt;
2926 code = GET_CODE (x);
2927 if (code == LABEL_REF)
2928 LABEL_NUSES (XEXP (x, 0))++;
2930 fmt = GET_RTX_FORMAT (code);
2931 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2933 if (fmt[i] == 'e')
2934 mark_label_nuses (XEXP (x, i));
2935 else if (fmt[i] == 'E')
2936 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2937 mark_label_nuses (XVECEXP (x, i, j));
2942 /* Try splitting insns that can be split for better scheduling.
2943 PAT is the pattern which might split.
2944 TRIAL is the insn providing PAT.
2945 LAST is non-zero if we should return the last insn of the sequence produced.
2947 If this routine succeeds in splitting, it returns the first or last
2948 replacement insn depending on the value of LAST. Otherwise, it
2949 returns TRIAL. If the insn to be returned can be split, it will be. */
2952 try_split (pat, trial, last)
2953 rtx pat, trial;
2954 int last;
2956 rtx before = PREV_INSN (trial);
2957 rtx after = NEXT_INSN (trial);
2958 int has_barrier = 0;
2959 rtx tem;
2960 rtx note, seq;
2961 int probability;
2963 if (any_condjump_p (trial)
2964 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
2965 split_branch_probability = INTVAL (XEXP (note, 0));
2966 probability = split_branch_probability;
2968 seq = split_insns (pat, trial);
2970 split_branch_probability = -1;
2972 /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
2973 We may need to handle this specially. */
2974 if (after && GET_CODE (after) == BARRIER)
2976 has_barrier = 1;
2977 after = NEXT_INSN (after);
2980 if (seq)
2982 /* SEQ can either be a SEQUENCE or the pattern of a single insn.
2983 The latter case will normally arise only when being done so that
2984 it, in turn, will be split (SFmode on the 29k is an example). */
2985 if (GET_CODE (seq) == SEQUENCE)
2987 int i, njumps = 0;
2989 /* Avoid infinite loop if any insn of the result matches
2990 the original pattern. */
2991 for (i = 0; i < XVECLEN (seq, 0); i++)
2992 if (GET_CODE (XVECEXP (seq, 0, i)) == INSN
2993 && rtx_equal_p (PATTERN (XVECEXP (seq, 0, i)), pat))
2994 return trial;
2996 /* Mark labels. */
2997 for (i = XVECLEN (seq, 0) - 1; i >= 0; i--)
2998 if (GET_CODE (XVECEXP (seq, 0, i)) == JUMP_INSN)
3000 rtx insn = XVECEXP (seq, 0, i);
3001 mark_jump_label (PATTERN (insn),
3002 XVECEXP (seq, 0, i), 0);
3003 njumps++;
3004 if (probability != -1
3005 && any_condjump_p (insn)
3006 && !find_reg_note (insn, REG_BR_PROB, 0))
3008 /* We can preserve the REG_BR_PROB notes only if exactly
3009 one jump is created, otherwise the machine description
3010 is responsible for this step using
3011 the split_branch_probability variable. */
3012 if (njumps != 1)
3013 abort ();
3014 REG_NOTES (insn)
3015 = gen_rtx_EXPR_LIST (REG_BR_PROB,
3016 GEN_INT (probability),
3017 REG_NOTES (insn));
3021 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3022 in SEQ and copy our CALL_INSN_FUNCTION_USAGE to it. */
3023 if (GET_CODE (trial) == CALL_INSN)
3024 for (i = XVECLEN (seq, 0) - 1; i >= 0; i--)
3025 if (GET_CODE (XVECEXP (seq, 0, i)) == CALL_INSN)
3026 CALL_INSN_FUNCTION_USAGE (XVECEXP (seq, 0, i))
3027 = CALL_INSN_FUNCTION_USAGE (trial);
3029 /* Copy notes, particularly those related to the CFG. */
3030 for (note = REG_NOTES (trial); note ; note = XEXP (note, 1))
3032 switch (REG_NOTE_KIND (note))
3034 case REG_EH_REGION:
3035 for (i = XVECLEN (seq, 0) - 1; i >= 0; i--)
3037 rtx insn = XVECEXP (seq, 0, i);
3038 if (GET_CODE (insn) == CALL_INSN
3039 || (flag_non_call_exceptions
3040 && may_trap_p (PATTERN (insn))))
3041 REG_NOTES (insn)
3042 = gen_rtx_EXPR_LIST (REG_EH_REGION,
3043 XEXP (note, 0),
3044 REG_NOTES (insn));
3046 break;
3048 case REG_NORETURN:
3049 case REG_SETJMP:
3050 case REG_ALWAYS_RETURN:
3051 for (i = XVECLEN (seq, 0) - 1; i >= 0; i--)
3053 rtx insn = XVECEXP (seq, 0, i);
3054 if (GET_CODE (insn) == CALL_INSN)
3055 REG_NOTES (insn)
3056 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3057 XEXP (note, 0),
3058 REG_NOTES (insn));
3060 break;
3062 case REG_NON_LOCAL_GOTO:
3063 for (i = XVECLEN (seq, 0) - 1; i >= 0; i--)
3065 rtx insn = XVECEXP (seq, 0, i);
3066 if (GET_CODE (insn) == JUMP_INSN)
3067 REG_NOTES (insn)
3068 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3069 XEXP (note, 0),
3070 REG_NOTES (insn));
3072 break;
3074 default:
3075 break;
3079 /* If there are LABELS inside the split insns, increment the
3080 usage count so we don't delete the label. */
3081 if (GET_CODE (trial) == INSN)
3082 for (i = XVECLEN (seq, 0) - 1; i >= 0; i--)
3083 if (GET_CODE (XVECEXP (seq, 0, i)) == INSN)
3084 mark_label_nuses (PATTERN (XVECEXP (seq, 0, i)));
3086 tem = emit_insn_after (seq, trial);
3088 delete_insn (trial);
3089 if (has_barrier)
3090 emit_barrier_after (tem);
3092 /* Recursively call try_split for each new insn created; by the
3093 time control returns here that insn will be fully split, so
3094 set LAST and continue from the insn after the one returned.
3095 We can't use next_active_insn here since AFTER may be a note.
3096 Ignore deleted insns, which can occur if not optimizing. */
3097 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3098 if (! INSN_DELETED_P (tem) && INSN_P (tem))
3099 tem = try_split (PATTERN (tem), tem, 1);
3101 /* Avoid infinite loop if the result matches the original pattern. */
3102 else if (rtx_equal_p (seq, pat))
3103 return trial;
3104 else
3106 PATTERN (trial) = seq;
3107 INSN_CODE (trial) = -1;
3108 try_split (seq, trial, last);
3111 /* Return either the first or the last insn, depending on which was
3112 requested. */
3113 return last
3114 ? (after ? PREV_INSN (after) : last_insn)
3115 : NEXT_INSN (before);
3118 return trial;
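/* Sketch, illustrative only (the example_ helper is hypothetical):
   split a single insn in place; a nonzero LAST asks for the final
   insn of the replacement sequence.  */
#if 0
static rtx
example_split_insn (insn)
     rtx insn;
{
  return try_split (PATTERN (insn), insn, 1);
}
#endif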
3121 /* Make and return an INSN rtx, initializing all its slots.
3122 Store PATTERN in the pattern slots. */
3125 make_insn_raw (pattern)
3126 rtx pattern;
3128 rtx insn;
3130 insn = rtx_alloc (INSN);
3132 INSN_UID (insn) = cur_insn_uid++;
3133 PATTERN (insn) = pattern;
3134 INSN_CODE (insn) = -1;
3135 LOG_LINKS (insn) = NULL;
3136 REG_NOTES (insn) = NULL;
3138 #ifdef ENABLE_RTL_CHECKING
3139 if (insn
3140 && INSN_P (insn)
3141 && (returnjump_p (insn)
3142 || (GET_CODE (insn) == SET
3143 && SET_DEST (insn) == pc_rtx)))
3145 warning ("ICE: emit_insn used where emit_jump_insn needed:\n");
3146 debug_rtx (insn);
3148 #endif
3150 return insn;
3153 /* Like `make_insn' but make a JUMP_INSN instead of an insn. */
3155 static rtx
3156 make_jump_insn_raw (pattern)
3157 rtx pattern;
3159 rtx insn;
3161 insn = rtx_alloc (JUMP_INSN);
3162 INSN_UID (insn) = cur_insn_uid++;
3164 PATTERN (insn) = pattern;
3165 INSN_CODE (insn) = -1;
3166 LOG_LINKS (insn) = NULL;
3167 REG_NOTES (insn) = NULL;
3168 JUMP_LABEL (insn) = NULL;
3170 return insn;
3173 /* Like `make_insn' but make a CALL_INSN instead of an insn. */
3175 static rtx
3176 make_call_insn_raw (pattern)
3177 rtx pattern;
3179 rtx insn;
3181 insn = rtx_alloc (CALL_INSN);
3182 INSN_UID (insn) = cur_insn_uid++;
3184 PATTERN (insn) = pattern;
3185 INSN_CODE (insn) = -1;
3186 LOG_LINKS (insn) = NULL;
3187 REG_NOTES (insn) = NULL;
3188 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
3190 return insn;
3193 /* Add INSN to the end of the doubly-linked list.
3194 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
3196 void
3197 add_insn (insn)
3198 rtx insn;
3200 PREV_INSN (insn) = last_insn;
3201 NEXT_INSN (insn) = 0;
3203 if (NULL != last_insn)
3204 NEXT_INSN (last_insn) = insn;
3206 if (NULL == first_insn)
3207 first_insn = insn;
3209 last_insn = insn;
3212 /* Add INSN into the doubly-linked list after insn AFTER. This and
3213 the next should be the only functions called to insert an insn once
3214 delay slots have been filled since only they know how to update a
3215 SEQUENCE. */
3217 void
3218 add_insn_after (insn, after)
3219 rtx insn, after;
3221 rtx next = NEXT_INSN (after);
3222 basic_block bb;
3224 if (optimize && INSN_DELETED_P (after))
3225 abort ();
3227 NEXT_INSN (insn) = next;
3228 PREV_INSN (insn) = after;
3230 if (next)
3232 PREV_INSN (next) = insn;
3233 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
3234 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
3236 else if (last_insn == after)
3237 last_insn = insn;
3238 else
3240 struct sequence_stack *stack = seq_stack;
3241 /* Scan all pending sequences too. */
3242 for (; stack; stack = stack->next)
3243 if (after == stack->last)
3245 stack->last = insn;
3246 break;
3249 if (stack == 0)
3250 abort ();
3253 if (basic_block_for_insn
3254 && (unsigned int) INSN_UID (after) < basic_block_for_insn->num_elements
3255 && (bb = BLOCK_FOR_INSN (after)))
3257 set_block_for_insn (insn, bb);
3258 if (INSN_P (insn))
3259 bb->flags |= BB_DIRTY;
3260 /* Should not happen, as the first insn in the BB is always
3261 either a NOTE or a LABEL. */
3262 if (bb->end == after
3263 /* Avoid clobbering of structure when creating new BB. */
3264 && GET_CODE (insn) != BARRIER
3265 && (GET_CODE (insn) != NOTE
3266 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
3267 bb->end = insn;
3270 NEXT_INSN (after) = insn;
3271 if (GET_CODE (after) == INSN && GET_CODE (PATTERN (after)) == SEQUENCE)
3273 rtx sequence = PATTERN (after);
3274 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3278 /* Add INSN into the doubly-linked list before insn BEFORE. This and
3279 the previous should be the only functions called to insert an insn once
3280 delay slots have been filled since only they know how to update a
3281 SEQUENCE. */
3283 void
3284 add_insn_before (insn, before)
3285 rtx insn, before;
3287 rtx prev = PREV_INSN (before);
3288 basic_block bb;
3290 if (optimize && INSN_DELETED_P (before))
3291 abort ();
3293 PREV_INSN (insn) = prev;
3294 NEXT_INSN (insn) = before;
3296 if (prev)
3298 NEXT_INSN (prev) = insn;
3299 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
3301 rtx sequence = PATTERN (prev);
3302 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3305 else if (first_insn == before)
3306 first_insn = insn;
3307 else
3309 struct sequence_stack *stack = seq_stack;
3310 /* Scan all pending sequences too. */
3311 for (; stack; stack = stack->next)
3312 if (before == stack->first)
3314 stack->first = insn;
3315 break;
3318 if (stack == 0)
3319 abort ();
3322 if (basic_block_for_insn
3323 && (unsigned int) INSN_UID (before) < basic_block_for_insn->num_elements
3324 && (bb = BLOCK_FOR_INSN (before)))
3326 set_block_for_insn (insn, bb);
3327 if (INSN_P (insn))
3328 bb->flags |= BB_DIRTY;
3329 /* Should not happen, as the first insn in the BB is always
3330 either a NOTE or a LABEL. */
3331 if (bb->head == insn
3332 /* Avoid clobbering of structure when creating new BB. */
3333 && GET_CODE (insn) != BARRIER
3334 && (GET_CODE (insn) != NOTE
3335 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
3336 abort ();
3339 PREV_INSN (before) = insn;
3340 if (GET_CODE (before) == INSN && GET_CODE (PATTERN (before)) == SEQUENCE)
3341 PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
3344 /* Remove an insn from its doubly-linked list. This function knows how
3345 to handle sequences. */
3346 void
3347 remove_insn (insn)
3348 rtx insn;
3350 rtx next = NEXT_INSN (insn);
3351 rtx prev = PREV_INSN (insn);
3352 basic_block bb;
3354 if (prev)
3356 NEXT_INSN (prev) = next;
3357 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
3359 rtx sequence = PATTERN (prev);
3360 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
3363 else if (first_insn == insn)
3364 first_insn = next;
3365 else
3367 struct sequence_stack *stack = seq_stack;
3368 /* Scan all pending sequences too. */
3369 for (; stack; stack = stack->next)
3370 if (insn == stack->first)
3372 stack->first = next;
3373 break;
3376 if (stack == 0)
3377 abort ();
3380 if (next)
3382 PREV_INSN (next) = prev;
3383 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
3384 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
3386 else if (last_insn == insn)
3387 last_insn = prev;
3388 else
3390 struct sequence_stack *stack = seq_stack;
3391 /* Scan all pending sequences too. */
3392 for (; stack; stack = stack->next)
3393 if (insn == stack->last)
3395 stack->last = prev;
3396 break;
3399 if (stack == 0)
3400 abort ();
3402 if (basic_block_for_insn
3403 && (unsigned int) INSN_UID (insn) < basic_block_for_insn->num_elements
3404 && (bb = BLOCK_FOR_INSN (insn)))
3406 if (INSN_P (insn))
3407 bb->flags |= BB_DIRTY;
3408 if (bb->head == insn)
3410 /* Never ever delete the basic block note without deleting whole
3411 basic block. */
3412 if (GET_CODE (insn) == NOTE)
3413 abort ();
3414 bb->head = next;
3416 if (bb->end == insn)
3417 bb->end = prev;
3421 /* Delete all insns made since FROM.
3422 FROM becomes the new last instruction. */
3424 void
3425 delete_insns_since (from)
3426 rtx from;
3428 if (from == 0)
3429 first_insn = 0;
3430 else
3431 NEXT_INSN (from) = 0;
3432 last_insn = from;
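/* Sketch, illustrative only (the example_ helper is hypothetical): the
   usual backtracking pattern -- remember the last insn, attempt an
   expansion, and discard the new insns on failure.  */
#if 0
static int
example_try_expand ()
{
  rtx last = get_last_insn ();

  /* ... emit a tentative expansion here ...  */
  if (0 /* the expansion failed */)
    {
      delete_insns_since (last);
      return 0;
    }
  return 1;
}
#endif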
3435 /* This function is deprecated, please use sequences instead.
3437 Move a consecutive bunch of insns to a different place in the chain.
3438 The insns to be moved are those between FROM and TO.
3439 They are moved to a new position after the insn AFTER.
3440 AFTER must not be FROM or TO or any insn in between.
3442 This function does not know about SEQUENCEs and hence should not be
3443 called after delay-slot filling has been done. */
3445 void
3446 reorder_insns_nobb (from, to, after)
3447 rtx from, to, after;
3449 /* Splice this bunch out of where it is now. */
3450 if (PREV_INSN (from))
3451 NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
3452 if (NEXT_INSN (to))
3453 PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
3454 if (last_insn == to)
3455 last_insn = PREV_INSN (from);
3456 if (first_insn == from)
3457 first_insn = NEXT_INSN (to);
3459 /* Make the new neighbors point to it and it to them. */
3460 if (NEXT_INSN (after))
3461 PREV_INSN (NEXT_INSN (after)) = to;
3463 NEXT_INSN (to) = NEXT_INSN (after);
3464 PREV_INSN (from) = after;
3465 NEXT_INSN (after) = from;
3466 if (after == last_insn)
3467 last_insn = to;
3470 /* Same as function above, but take care to update BB boundaries. */
3471 void
3472 reorder_insns (from, to, after)
3473 rtx from, to, after;
3475 rtx prev = PREV_INSN (from);
3476 basic_block bb, bb2;
3478 reorder_insns_nobb (from, to, after);
3480 if (basic_block_for_insn
3481 && (unsigned int) INSN_UID (after) < basic_block_for_insn->num_elements
3482 && (bb = BLOCK_FOR_INSN (after)))
3484 rtx x;
3485 bb->flags |= BB_DIRTY;
3487 if (basic_block_for_insn
3488 && ((unsigned int) INSN_UID (from)
3489 < basic_block_for_insn->num_elements)
3490 && (bb2 = BLOCK_FOR_INSN (from)))
3492 if (bb2->end == to)
3493 bb2->end = prev;
3494 bb2->flags |= BB_DIRTY;
3497 if (bb->end == after)
3498 bb->end = to;
3500 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
3501 set_block_for_insn (x, bb);
3505 /* Return the line note insn preceding INSN. */
3507 static rtx
3508 find_line_note (insn)
3509 rtx insn;
3511 if (no_line_numbers)
3512 return 0;
3514 for (; insn; insn = PREV_INSN (insn))
3515 if (GET_CODE (insn) == NOTE
3516 && NOTE_LINE_NUMBER (insn) >= 0)
3517 break;
3519 return insn;
3522 /* Like reorder_insns, but inserts line notes to preserve the line numbers
3523 of the moved insns when debugging. This may insert a note between AFTER
3524 and FROM, and another one after TO. */
3526 void
3527 reorder_insns_with_line_notes (from, to, after)
3528 rtx from, to, after;
3530 rtx from_line = find_line_note (from);
3531 rtx after_line = find_line_note (after);
3533 reorder_insns (from, to, after);
3535 if (from_line == after_line)
3536 return;
3538 if (from_line)
3539 emit_line_note_after (NOTE_SOURCE_FILE (from_line),
3540 NOTE_LINE_NUMBER (from_line),
3541 after);
3542 if (after_line)
3543 emit_line_note_after (NOTE_SOURCE_FILE (after_line),
3544 NOTE_LINE_NUMBER (after_line),
3545 to);
3548 /* Remove unnecessary notes from the instruction stream. */
3550 void
3551 remove_unnecessary_notes ()
3553 rtx block_stack = NULL_RTX;
3554 rtx eh_stack = NULL_RTX;
3555 rtx insn;
3556 rtx next;
3557 rtx tmp;
3559 /* We must not remove the first instruction in the function because
3560 the compiler depends on the first instruction being a note. */
3561 for (insn = NEXT_INSN (get_insns ()); insn; insn = next)
3563 /* Remember what's next. */
3564 next = NEXT_INSN (insn);
3566 /* We're only interested in notes. */
3567 if (GET_CODE (insn) != NOTE)
3568 continue;
3570 switch (NOTE_LINE_NUMBER (insn))
3572 case NOTE_INSN_DELETED:
3573 case NOTE_INSN_LOOP_END_TOP_COND:
3574 remove_insn (insn);
3575 break;
3577 case NOTE_INSN_EH_REGION_BEG:
3578 eh_stack = alloc_INSN_LIST (insn, eh_stack);
3579 break;
3581 case NOTE_INSN_EH_REGION_END:
3582 /* Too many end notes. */
3583 if (eh_stack == NULL_RTX)
3584 abort ();
3585 /* Mismatched nesting. */
3586 if (NOTE_EH_HANDLER (XEXP (eh_stack, 0)) != NOTE_EH_HANDLER (insn))
3587 abort ();
3588 tmp = eh_stack;
3589 eh_stack = XEXP (eh_stack, 1);
3590 free_INSN_LIST_node (tmp);
3591 break;
3593 case NOTE_INSN_BLOCK_BEG:
3594 /* By now, all notes indicating lexical blocks should have
3595 NOTE_BLOCK filled in. */
3596 if (NOTE_BLOCK (insn) == NULL_TREE)
3597 abort ();
3598 block_stack = alloc_INSN_LIST (insn, block_stack);
3599 break;
3601 case NOTE_INSN_BLOCK_END:
3602 /* Too many end notes. */
3603 if (block_stack == NULL_RTX)
3604 abort ();
3605 /* Mismatched nesting. */
3606 if (NOTE_BLOCK (XEXP (block_stack, 0)) != NOTE_BLOCK (insn))
3607 abort ();
3608 tmp = block_stack;
3609 block_stack = XEXP (block_stack, 1);
3610 free_INSN_LIST_node (tmp);
3612 /* Scan back to see if there are any non-note instructions
3613 between INSN and the beginning of this block. If not,
3614 then there is no PC range in the generated code that will
3615 actually be in this block, so there's no point in
3616 remembering the existence of the block. */
3617 for (tmp = PREV_INSN (insn); tmp ; tmp = PREV_INSN (tmp))
3619 /* This block contains a real instruction. Note that we
3620 don't include labels; if the only thing in the block
3621 is a label, then there are still no PC values that
3622 lie within the block. */
3623 if (INSN_P (tmp))
3624 break;
3626 /* We're only interested in NOTEs. */
3627 if (GET_CODE (tmp) != NOTE)
3628 continue;
3630 if (NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BLOCK_BEG)
3632 /* We just verified that this BLOCK matches us with
3633 the block_stack check above. Never delete the
3634 BLOCK for the outermost scope of the function; we
3635 can refer to names from that scope even if the
3636 block notes are messed up. */
3637 if (! is_body_block (NOTE_BLOCK (insn))
3638 && (*debug_hooks->ignore_block) (NOTE_BLOCK (insn)))
3640 remove_insn (tmp);
3641 remove_insn (insn);
3643 break;
3645 else if (NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BLOCK_END)
3646 /* There's a nested block. We need to leave the
3647 current block in place since otherwise the debugger
3648 wouldn't be able to show symbols from our block in
3649 the nested block. */
3650 break;
3655 /* Too many begin notes. */
3656 if (block_stack || eh_stack)
3657 abort ();
3661 /* Emit an insn of given code and pattern
3662 at a specified place within the doubly-linked list. */
3664 /* Make an instruction with body PATTERN
3665 and output it before the instruction BEFORE. */
3668 emit_insn_before (pattern, before)
3669 rtx pattern, before;
3671 rtx insn = before;
3673 if (GET_CODE (pattern) == SEQUENCE)
3675 int i;
3677 for (i = 0; i < XVECLEN (pattern, 0); i++)
3679 insn = XVECEXP (pattern, 0, i);
3680 add_insn_before (insn, before);
3683 else
3685 insn = make_insn_raw (pattern);
3686 add_insn_before (insn, before);
3689 return insn;
3692 /* Make an instruction with body PATTERN and code JUMP_INSN
3693 and output it before the instruction BEFORE. */
3696 emit_jump_insn_before (pattern, before)
3697 rtx pattern, before;
3699 rtx insn;
3701 if (GET_CODE (pattern) == SEQUENCE)
3702 insn = emit_insn_before (pattern, before);
3703 else
3705 insn = make_jump_insn_raw (pattern);
3706 add_insn_before (insn, before);
3709 return insn;
3712 /* Make an instruction with body PATTERN and code CALL_INSN
3713 and output it before the instruction BEFORE. */
3716 emit_call_insn_before (pattern, before)
3717 rtx pattern, before;
3719 rtx insn;
3721 if (GET_CODE (pattern) == SEQUENCE)
3722 insn = emit_insn_before (pattern, before);
3723 else
3725 insn = make_call_insn_raw (pattern);
3726 add_insn_before (insn, before);
3727 PUT_CODE (insn, CALL_INSN);
3730 return insn;
3733 /* Make an insn of code BARRIER
3734 and output it before the insn BEFORE. */
3737 emit_barrier_before (before)
3738 rtx before;
3740 rtx insn = rtx_alloc (BARRIER);
3742 INSN_UID (insn) = cur_insn_uid++;
3744 add_insn_before (insn, before);
3745 return insn;
3748 /* Emit the label LABEL before the insn BEFORE. */
3751 emit_label_before (label, before)
3752 rtx label, before;
3754 /* This can be called twice for the same label as a result of the
3755 confusion that follows a syntax error! So make it harmless. */
3756 if (INSN_UID (label) == 0)
3758 INSN_UID (label) = cur_insn_uid++;
3759 add_insn_before (label, before);
3762 return label;
3765 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
3768 emit_note_before (subtype, before)
3769 int subtype;
3770 rtx before;
3772 rtx note = rtx_alloc (NOTE);
3773 INSN_UID (note) = cur_insn_uid++;
3774 NOTE_SOURCE_FILE (note) = 0;
3775 NOTE_LINE_NUMBER (note) = subtype;
3777 add_insn_before (note, before);
3778 return note;
3781 /* Make an insn of code INSN with body PATTERN
3782 and output it after the insn AFTER. */
3785 emit_insn_after (pattern, after)
3786 rtx pattern, after;
3788 rtx insn = after;
3790 if (GET_CODE (pattern) == SEQUENCE)
3792 int i;
3794 for (i = 0; i < XVECLEN (pattern, 0); i++)
3796 insn = XVECEXP (pattern, 0, i);
3797 add_insn_after (insn, after);
3798 after = insn;
3801 else
3803 insn = make_insn_raw (pattern);
3804 add_insn_after (insn, after);
3807 return insn;
3810 /* Similar to emit_insn_after, except that line notes are to be inserted so
3811 as to act as if this insn were at FROM. */
3813 void
3814 emit_insn_after_with_line_notes (pattern, after, from)
3815 rtx pattern, after, from;
3817 rtx from_line = find_line_note (from);
3818 rtx after_line = find_line_note (after);
3819 rtx insn = emit_insn_after (pattern, after);
3821 if (from_line)
3822 emit_line_note_after (NOTE_SOURCE_FILE (from_line),
3823 NOTE_LINE_NUMBER (from_line),
3824 after);
3826 if (after_line)
3827 emit_line_note_after (NOTE_SOURCE_FILE (after_line),
3828 NOTE_LINE_NUMBER (after_line),
3829 insn);
3832 /* Make an insn of code JUMP_INSN with body PATTERN
3833 and output it after the insn AFTER. */
3836 emit_jump_insn_after (pattern, after)
3837 rtx pattern, after;
3839 rtx insn;
3841 if (GET_CODE (pattern) == SEQUENCE)
3842 insn = emit_insn_after (pattern, after);
3843 else
3845 insn = make_jump_insn_raw (pattern);
3846 add_insn_after (insn, after);
3849 return insn;
3852 /* Make an insn of code BARRIER
3853 and output it after the insn AFTER. */
3856 emit_barrier_after (after)
3857 rtx after;
3859 rtx insn = rtx_alloc (BARRIER);
3861 INSN_UID (insn) = cur_insn_uid++;
3863 add_insn_after (insn, after);
3864 return insn;
3867 /* Emit the label LABEL after the insn AFTER. */
3870 emit_label_after (label, after)
3871 rtx label, after;
3873 /* This can be called twice for the same label
3874 as a result of the confusion that follows a syntax error!
3875 So make it harmless. */
3876 if (INSN_UID (label) == 0)
3878 INSN_UID (label) = cur_insn_uid++;
3879 add_insn_after (label, after);
3882 return label;
3885 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
3888 emit_note_after (subtype, after)
3889 int subtype;
3890 rtx after;
3892 rtx note = rtx_alloc (NOTE);
3893 INSN_UID (note) = cur_insn_uid++;
3894 NOTE_SOURCE_FILE (note) = 0;
3895 NOTE_LINE_NUMBER (note) = subtype;
3896 add_insn_after (note, after);
3897 return note;
3900 /* Emit a line note for FILE and LINE after the insn AFTER. */
3903 emit_line_note_after (file, line, after)
3904 const char *file;
3905 int line;
3906 rtx after;
3908 rtx note;
3910 if (no_line_numbers && line > 0)
3912 cur_insn_uid++;
3913 return 0;
3916 note = rtx_alloc (NOTE);
3917 INSN_UID (note) = cur_insn_uid++;
3918 NOTE_SOURCE_FILE (note) = file;
3919 NOTE_LINE_NUMBER (note) = line;
3920 add_insn_after (note, after);
3921 return note;
3924 /* Make an insn of code INSN with pattern PATTERN
3925 and add it to the end of the doubly-linked list.
3926 If PATTERN is a SEQUENCE, take the elements of it
3927 and emit an insn for each element.
3929 Returns the last insn emitted. */
3932 emit_insn (pattern)
3933 rtx pattern;
3935 rtx insn = last_insn;
3937 if (GET_CODE (pattern) == SEQUENCE)
3939 int i;
3941 for (i = 0; i < XVECLEN (pattern, 0); i++)
3943 insn = XVECEXP (pattern, 0, i);
3944 add_insn (insn);
3947 else
3949 insn = make_insn_raw (pattern);
3950 add_insn (insn);
3953 return insn;
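/* Sketch, illustrative only (the example_ helper is hypothetical):
   emit a register-to-register copy at the end of the current chain.  */
#if 0
static rtx
example_emit_copy (dest, src)
     rtx dest, src;
{
  return emit_insn (gen_rtx_SET (VOIDmode, dest, src));
}
#endif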
3956 /* Emit the insns in a chain starting with INSN.
3957 Return the last insn emitted. */
3960 emit_insns (insn)
3961 rtx insn;
3963 rtx last = 0;
3965 while (insn)
3967 rtx next = NEXT_INSN (insn);
3968 add_insn (insn);
3969 last = insn;
3970 insn = next;
3973 return last;
3976 /* Emit the insns in a chain starting with INSN and place them in front of
3977 the insn BEFORE. Return the last insn emitted. */
3980 emit_insns_before (insn, before)
3981 rtx insn;
3982 rtx before;
3984 rtx last = 0;
3986 while (insn)
3988 rtx next = NEXT_INSN (insn);
3989 add_insn_before (insn, before);
3990 last = insn;
3991 insn = next;
3994 return last;
3997 /* Emit the insns in a chain starting with FIRST and place them in back of
3998 the insn AFTER. Return the last insn emitted. */
4001 emit_insns_after (first, after)
4002 rtx first;
4003 rtx after;
4005 rtx last;
4006 rtx after_after;
4007 basic_block bb;
4009 if (!after)
4010 abort ();
4012 if (!first)
4013 return after;
4015 if (basic_block_for_insn
4016 && (unsigned int) INSN_UID (after) < basic_block_for_insn->num_elements
4017 && (bb = BLOCK_FOR_INSN (after)))
4019 bb->flags |= BB_DIRTY;
4020 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4021 set_block_for_insn (last, bb);
4022 set_block_for_insn (last, bb);
4023 if (bb->end == after)
4024 bb->end = last;
4026 else
4027 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4028 continue;
4030 after_after = NEXT_INSN (after);
4032 NEXT_INSN (after) = first;
4033 PREV_INSN (first) = after;
4034 NEXT_INSN (last) = after_after;
4035 if (after_after)
4036 PREV_INSN (after_after) = last;
4038 if (after == last_insn)
4039 last_insn = last;
4040 return last;
4043 /* Make an insn of code JUMP_INSN with pattern PATTERN
4044 and add it to the end of the doubly-linked list. */
4047 emit_jump_insn (pattern)
4048 rtx pattern;
4050 if (GET_CODE (pattern) == SEQUENCE)
4051 return emit_insn (pattern);
4052 else
4054 rtx insn = make_jump_insn_raw (pattern);
4055 add_insn (insn);
4056 return insn;
4060 /* Make an insn of code CALL_INSN with pattern PATTERN
4061 and add it to the end of the doubly-linked list. */
4064 emit_call_insn (pattern)
4065 rtx pattern;
4067 if (GET_CODE (pattern) == SEQUENCE)
4068 return emit_insn (pattern);
4069 else
4071 rtx insn = make_call_insn_raw (pattern);
4072 add_insn (insn);
4073 PUT_CODE (insn, CALL_INSN);
4074 return insn;
4078 /* Add the label LABEL to the end of the doubly-linked list. */
4081 emit_label (label)
4082 rtx label;
4084 /* This can be called twice for the same label
4085 as a result of the confusion that follows a syntax error!
4086 So make it harmless. */
4087 if (INSN_UID (label) == 0)
4089 INSN_UID (label) = cur_insn_uid++;
4090 add_insn (label);
4092 return label;
4095 /* Make an insn of code BARRIER
4096 and add it to the end of the doubly-linked list. */
4099 emit_barrier ()
4101 rtx barrier = rtx_alloc (BARRIER);
4102 INSN_UID (barrier) = cur_insn_uid++;
4103 add_insn (barrier);
4104 return barrier;
4107 /* Make an insn of code NOTE
4108 with data-fields specified by FILE and LINE
4109 and add it to the end of the doubly-linked list,
4110 but only if line-numbers are desired for debugging info. */
4113 emit_line_note (file, line)
4114 const char *file;
4115 int line;
4117 set_file_and_line_for_stmt (file, line);
4119 #if 0
4120 if (no_line_numbers)
4121 return 0;
4122 #endif
4124 return emit_note (file, line);
4127 /* Make an insn of code NOTE
4128 with data-fields specified by FILE and LINE
4129 and add it to the end of the doubly-linked list.
4130 If it is a line-number NOTE, omit it if it matches the previous one. */
4133 emit_note (file, line)
4134 const char *file;
4135 int line;
4137 rtx note;
4139 if (line > 0)
4141 if (file && last_filename && !strcmp (file, last_filename)
4142 && line == last_linenum)
4143 return 0;
4144 last_filename = file;
4145 last_linenum = line;
4148 if (no_line_numbers && line > 0)
4150 cur_insn_uid++;
4151 return 0;
4154 note = rtx_alloc (NOTE);
4155 INSN_UID (note) = cur_insn_uid++;
4156 NOTE_SOURCE_FILE (note) = file;
4157 NOTE_LINE_NUMBER (note) = line;
4158 add_insn (note);
4159 return note;
4162 /* Emit a NOTE, and don't omit it even if LINE is the previous note. */
4165 emit_line_note_force (file, line)
4166 const char *file;
4167 int line;
4169 last_linenum = -1;
4170 return emit_line_note (file, line);
4173 /* Cause next statement to emit a line note even if the line number
4174 has not changed. This is used at the beginning of a function. */
4176 void
4177 force_next_line_note ()
4179 last_linenum = -1;
4182 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
4183 note of this type already exists, remove it first. */
4186 set_unique_reg_note (insn, kind, datum)
4187 rtx insn;
4188 enum reg_note kind;
4189 rtx datum;
4191 rtx note = find_reg_note (insn, kind, NULL_RTX);
4193 switch (kind)
4195 case REG_EQUAL:
4196 case REG_EQUIV:
4197 /* Don't add REG_EQUAL/REG_EQUIV notes if the insn
4198 has multiple sets (some callers assume single_set
4199 means the insn only has one set, when in fact it
4200 means the insn only has one *useful* set). */
4201 if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
4203 if (note)
4204 abort ();
4205 return NULL_RTX;
4208 /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
4209 It serves no useful purpose and breaks eliminate_regs. */
4210 if (GET_CODE (datum) == ASM_OPERANDS)
4211 return NULL_RTX;
4212 break;
4214 default:
4215 break;
4218 if (note)
4220 XEXP (note, 0) = datum;
4221 return note;
4224 REG_NOTES (insn) = gen_rtx_EXPR_LIST (kind, datum, REG_NOTES (insn));
4225 return REG_NOTES (insn);
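/* Sketch, illustrative only (the example_ helper is hypothetical):
   record that INSN's destination is known to equal a constant,
   replacing any stale REG_EQUAL note in the process.  */
#if 0
static void
example_note_equal (insn, value)
     rtx insn;
     HOST_WIDE_INT value;
{
  set_unique_reg_note (insn, REG_EQUAL, GEN_INT (value));
}
#endif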
4228 /* Return an indication of which type of insn should have X as a body.
4229 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
4231 enum rtx_code
4232 classify_insn (x)
4233 rtx x;
4235 if (GET_CODE (x) == CODE_LABEL)
4236 return CODE_LABEL;
4237 if (GET_CODE (x) == CALL)
4238 return CALL_INSN;
4239 if (GET_CODE (x) == RETURN)
4240 return JUMP_INSN;
4241 if (GET_CODE (x) == SET)
4243 if (SET_DEST (x) == pc_rtx)
4244 return JUMP_INSN;
4245 else if (GET_CODE (SET_SRC (x)) == CALL)
4246 return CALL_INSN;
4247 else
4248 return INSN;
4250 if (GET_CODE (x) == PARALLEL)
4252 int j;
4253 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
4254 if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
4255 return CALL_INSN;
4256 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
4257 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
4258 return JUMP_INSN;
4259 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
4260 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
4261 return CALL_INSN;
4263 return INSN;
4266 /* Emit the rtl pattern X as an appropriate kind of insn.
4267 If X is a label, it is simply added into the insn chain. */
4270 emit (x)
4271 rtx x;
4273 enum rtx_code code = classify_insn (x);
4275 if (code == CODE_LABEL)
4276 return emit_label (x);
4277 else if (code == INSN)
4278 return emit_insn (x);
4279 else if (code == JUMP_INSN)
4281 rtx insn = emit_jump_insn (x);
4282 if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
4283 return emit_barrier ();
4284 return insn;
4286 else if (code == CALL_INSN)
4287 return emit_call_insn (x);
4288 else
4289 abort ();
4292 /* Begin emitting insns to a sequence which can be packaged in an
4293 RTL_EXPR. If this sequence will contain something that might cause
4294 the compiler to pop arguments to function calls (because those
4295 pops have previously been deferred; see INHIBIT_DEFER_POP for more
4296 details), use do_pending_stack_adjust before calling this function.
4297 That will ensure that the deferred pops are not accidentally
4298 emitted in the middle of this sequence. */
4300 void
4301 start_sequence ()
4303 struct sequence_stack *tem;
4305 tem = (struct sequence_stack *) xmalloc (sizeof (struct sequence_stack));
4307 tem->next = seq_stack;
4308 tem->first = first_insn;
4309 tem->last = last_insn;
4310 tem->sequence_rtl_expr = seq_rtl_expr;
4312 seq_stack = tem;
4314 first_insn = 0;
4315 last_insn = 0;
4318 /* Similarly, but indicate that this sequence will be placed in T, an
4319 RTL_EXPR. See the documentation for start_sequence for more
4320 information about how to use this function. */
4322 void
4323 start_sequence_for_rtl_expr (t)
4324 tree t;
4326 start_sequence ();
4328 seq_rtl_expr = t;
4331 /* Set up the insn chain starting with FIRST as the current sequence,
4332 saving the previously current one. See the documentation for
4333 start_sequence for more information about how to use this function. */
4335 void
4336 push_to_sequence (first)
4337 rtx first;
4339 rtx last;
4341 start_sequence ();
4343 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last));
4345 first_insn = first;
4346 last_insn = last;
4349 /* Set up the insn chain, starting with FIRST and ending with LAST, as the current sequence. */
4351 void
4352 push_to_full_sequence (first, last)
4353 rtx first, last;
4355 start_sequence ();
4356 first_insn = first;
4357 last_insn = last;
4358 /* We really should have the end of the insn chain here. */
4359 if (last && NEXT_INSN (last))
4360 abort ();
4363 /* Set up the outer-level insn chain
4364 as the current sequence, saving the previously current one. */
4366 void
4367 push_topmost_sequence ()
4369 struct sequence_stack *stack, *top = NULL;
4371 start_sequence ();
4373 for (stack = seq_stack; stack; stack = stack->next)
4374 top = stack;
4376 first_insn = top->first;
4377 last_insn = top->last;
4378 seq_rtl_expr = top->sequence_rtl_expr;
4381 /* After emitting to the outer-level insn chain, update the outer-level
4382 insn chain and restore the previously saved state. */
4384 void
4385 pop_topmost_sequence ()
4387 struct sequence_stack *stack, *top = NULL;
4389 for (stack = seq_stack; stack; stack = stack->next)
4390 top = stack;
4392 top->first = first_insn;
4393 top->last = last_insn;
4394 /* ??? Why don't we save seq_rtl_expr here? */
4396 end_sequence ();
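/* Illustrative pairing (PATTERN is a placeholder): while nested inside
   other sequences, code can append to the function's outermost insn
   chain like this:

     push_topmost_sequence ();
     emit_insn (pattern);        /* appended at the end of the outer chain */
     pop_topmost_sequence ();
*/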
4399 /* After emitting to a sequence, restore the previously saved state.
4401 To get the contents of the sequence just made, you must call
4402 `gen_sequence' *before* calling here.
4404 If the compiler might have deferred popping arguments while
4405 generating this sequence, and this sequence will not be immediately
4406 inserted into the instruction stream, use do_pending_stack_adjust
4407 before calling gen_sequence. That will ensure that the deferred
4408 pops are inserted into this sequence, and not into some random
4409 location in the instruction stream. See INHIBIT_DEFER_POP for more
4410 information about deferred popping of arguments. */
4412 void
4413 end_sequence ()
4415 struct sequence_stack *tem = seq_stack;
4417 first_insn = tem->first;
4418 last_insn = tem->last;
4419 seq_rtl_expr = tem->sequence_rtl_expr;
4420 seq_stack = tem->next;
4422 free (tem);
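/* Illustrative caveat from the comment above: if the sequence will be
   kept for later rather than inserted immediately, flush deferred pops
   while the sequence is still current, so they land here and not at
   some random spot in the instruction stream:

     start_sequence ();
     ...
     do_pending_stack_adjust ();  /* deferred pops go into this sequence */
     seq = gen_sequence ();
     end_sequence ();
*/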
4425 /* This works like end_sequence, but records the old sequence in FIRST
4426 and LAST. */
4428 void
4429 end_full_sequence (first, last)
4430 rtx *first, *last;
4432 *first = first_insn;
4433 *last = last_insn;
4434 end_sequence ();
4437 /* Return 1 if currently emitting into a sequence. */
4439 int
4440 in_sequence_p ()
4442 return seq_stack != 0;
4445 /* Generate a SEQUENCE rtx containing the insns already emitted
4446 to the current sequence.
4448 This is how the gen_... function from a DEFINE_EXPAND
4449 constructs the SEQUENCE that it returns. */
4451 rtx
4452 gen_sequence ()
4454 rtx result;
4455 rtx tem;
4456 int i;
4457 int len;
4459 /* Count the insns in the chain. */
4460 len = 0;
4461 for (tem = first_insn; tem; tem = NEXT_INSN (tem))
4462 len++;
4464 /* If only one insn, return it rather than a SEQUENCE.
4465 (Now that we cache SEQUENCE expressions, it isn't worth special-casing
4466 the case of an empty list.)
4467 We only return the pattern of an insn if its code is INSN and it
4468 has no notes. This ensures that no information gets lost. */
4469 if (len == 1
4470 && ! RTX_FRAME_RELATED_P (first_insn)
4471 && GET_CODE (first_insn) == INSN
4472 /* Don't throw away any reg notes. */
4473 && REG_NOTES (first_insn) == 0)
4474 return PATTERN (first_insn);
4476 result = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (len));
4478 for (i = 0, tem = first_insn; tem; tem = NEXT_INSN (tem), i++)
4479 XVECEXP (result, 0, i) = tem;
4481 return result;
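/* Consequence of the one-insn shortcut above (illustrative; REG is a
   placeholder): the caller may get back either a bare pattern or a
   SEQUENCE, and should not assume one form:

     start_sequence ();
     emit_insn (gen_rtx_SET (VOIDmode, reg, const0_rtx));
     seq = gen_sequence ();   /* here the SET pattern itself, not a SEQUENCE */
     end_sequence ();

   emit_insn accepts both forms, so most callers need not care.  */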
4484 /* Put the various virtual registers into REGNO_REG_RTX. */
4486 void
4487 init_virtual_regs (es)
4488 struct emit_status *es;
4490 rtx *ptr = es->x_regno_reg_rtx;
4491 ptr[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
4492 ptr[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
4493 ptr[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
4494 ptr[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
4495 ptr[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
4498 void
4499 clear_emit_caches ()
4501 int i;
4503 /* Clear the start_sequence/gen_sequence cache. */
4504 for (i = 0; i < SEQUENCE_RESULT_SIZE; i++)
4505 sequence_result[i] = 0;
4506 free_insn = 0;
4509 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
4510 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
4511 static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
4512 static int copy_insn_n_scratches;
4514 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
4515 copied an ASM_OPERANDS.
4516 In that case, it is the original input-operand vector. */
4517 static rtvec orig_asm_operands_vector;
4519 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
4520 copied an ASM_OPERANDS.
4521 In that case, it is the copied input-operand vector. */
4522 static rtvec copy_asm_operands_vector;
4524 /* Likewise for the constraints vector. */
4525 static rtvec orig_asm_constraints_vector;
4526 static rtvec copy_asm_constraints_vector;
4528 /* Recursively create a new copy of an rtx for copy_insn.
4529 This function differs from copy_rtx in that it handles SCRATCHes and
4530 ASM_OPERANDs properly.
4531 Normally, this function is not used directly; use copy_insn as the front end.
4532 However, you could first copy an insn pattern with copy_insn and then use
4533 this function afterwards to properly copy any REG_NOTEs containing
4534 SCRATCHes. */
4536 rtx
4537 copy_insn_1 (orig)
4538 rtx orig;
4540 rtx copy;
4541 int i, j;
4542 RTX_CODE code;
4543 const char *format_ptr;
4545 code = GET_CODE (orig);
4547 switch (code)
4549 case REG:
4550 case QUEUED:
4551 case CONST_INT:
4552 case CONST_DOUBLE:
4553 case CONST_VECTOR:
4554 case SYMBOL_REF:
4555 case CODE_LABEL:
4556 case PC:
4557 case CC0:
4558 case ADDRESSOF:
4559 return orig;
4561 case SCRATCH:
4562 for (i = 0; i < copy_insn_n_scratches; i++)
4563 if (copy_insn_scratch_in[i] == orig)
4564 return copy_insn_scratch_out[i];
4565 break;
4567 case CONST:
4568 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
4569 a LABEL_REF, it isn't sharable. */
4570 if (GET_CODE (XEXP (orig, 0)) == PLUS
4571 && GET_CODE (XEXP (XEXP (orig, 0), 0)) == SYMBOL_REF
4572 && GET_CODE (XEXP (XEXP (orig, 0), 1)) == CONST_INT)
4573 return orig;
4574 break;
4576 /* A MEM with a constant address is not sharable. The problem is that
4577 the constant address may need to be reloaded. If the mem is shared,
4578 then reloading one copy of this mem will cause all copies to appear
4579 to have been reloaded. */
4581 default:
4582 break;
4585 copy = rtx_alloc (code);
4587 /* Copy the various flags, and other information. We assume that
4588 all fields need copying, and then clear the fields that should
4589 not be copied. That is the sensible default behavior, and forces
4590 us to explicitly document why we are *not* copying a flag. */
4591 memcpy (copy, orig, sizeof (struct rtx_def) - sizeof (rtunion));
4593 /* We do not copy the USED flag, which is used as a mark bit during
4594 walks over the RTL. */
4595 copy->used = 0;
4597 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
4598 if (GET_RTX_CLASS (code) == 'i')
4600 copy->jump = 0;
4601 copy->call = 0;
4602 copy->frame_related = 0;
4605 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
4607 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
4609 copy->fld[i] = orig->fld[i];
4610 switch (*format_ptr++)
4612 case 'e':
4613 if (XEXP (orig, i) != NULL)
4614 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
4615 break;
4617 case 'E':
4618 case 'V':
4619 if (XVEC (orig, i) == orig_asm_constraints_vector)
4620 XVEC (copy, i) = copy_asm_constraints_vector;
4621 else if (XVEC (orig, i) == orig_asm_operands_vector)
4622 XVEC (copy, i) = copy_asm_operands_vector;
4623 else if (XVEC (orig, i) != NULL)
4625 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
4626 for (j = 0; j < XVECLEN (copy, i); j++)
4627 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
4629 break;
4631 case 't':
4632 case 'w':
4633 case 'i':
4634 case 's':
4635 case 'S':
4636 case 'u':
4637 case '0':
4638 /* These are left unchanged. */
4639 break;
4641 default:
4642 abort ();
4646 if (code == SCRATCH)
4648 i = copy_insn_n_scratches++;
4649 if (i >= MAX_RECOG_OPERANDS)
4650 abort ();
4651 copy_insn_scratch_in[i] = orig;
4652 copy_insn_scratch_out[i] = copy;
4654 else if (code == ASM_OPERANDS)
4656 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
4657 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
4658 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
4659 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
4662 return copy;
4665 /* Create a new copy of an rtx.
4666 This function differs from copy_rtx in that it handles SCRATCHes and
4667 ASM_OPERANDs properly.
4668 INSN doesn't really have to be a full INSN; it could be just the
4669 pattern. */
4670 rtx
4671 copy_insn (insn)
4672 rtx insn;
4674 copy_insn_n_scratches = 0;
4675 orig_asm_operands_vector = 0;
4676 orig_asm_constraints_vector = 0;
4677 copy_asm_operands_vector = 0;
4678 copy_asm_constraints_vector = 0;
4679 return copy_insn_1 (insn);
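/* Illustrative use (INSN is any insn): copying just the pattern is the
   common case; notes referring to the same SCRATCHes can then be copied
   with copy_insn_1, per the comment above it, so that the copies share
   the replacement SCRATCH rtx:

     rtx pat = copy_insn (PATTERN (insn));
*/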
4682 /* Initialize data structures and variables in this file
4683 before generating rtl for each function. */
4685 void
4686 init_emit ()
4688 struct function *f = cfun;
4690 f->emit = (struct emit_status *) xmalloc (sizeof (struct emit_status));
4691 first_insn = NULL;
4692 last_insn = NULL;
4693 seq_rtl_expr = NULL;
4694 cur_insn_uid = 1;
4695 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
4696 last_linenum = 0;
4697 last_filename = 0;
4698 first_label_num = label_num;
4699 last_label_num = 0;
4700 seq_stack = NULL;
4702 clear_emit_caches ();
4704 /* Init the tables that describe all the pseudo regs. */
4706 f->emit->regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
4708 f->emit->regno_pointer_align
4709 = (unsigned char *) xcalloc (f->emit->regno_pointer_align_length,
4710 sizeof (unsigned char));
4712 regno_reg_rtx
4713 = (rtx *) xcalloc (f->emit->regno_pointer_align_length, sizeof (rtx));
4715 f->emit->regno_decl
4716 = (tree *) xcalloc (f->emit->regno_pointer_align_length, sizeof (tree));
4718 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
4719 init_virtual_regs (f->emit);
4721 /* Indicate that the virtual registers and stack locations are
4722 all pointers. */
4723 REG_POINTER (stack_pointer_rtx) = 1;
4724 REG_POINTER (frame_pointer_rtx) = 1;
4725 REG_POINTER (hard_frame_pointer_rtx) = 1;
4726 REG_POINTER (arg_pointer_rtx) = 1;
4728 REG_POINTER (virtual_incoming_args_rtx) = 1;
4729 REG_POINTER (virtual_stack_vars_rtx) = 1;
4730 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
4731 REG_POINTER (virtual_outgoing_args_rtx) = 1;
4732 REG_POINTER (virtual_cfa_rtx) = 1;
4734 #ifdef STACK_BOUNDARY
4735 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
4736 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
4737 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
4738 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
4740 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
4741 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
4742 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
4743 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
4744 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
4745 #endif
4747 #ifdef INIT_EXPANDERS
4748 INIT_EXPANDERS;
4749 #endif
4752 /* Mark SS for GC. */
4754 static void
4755 mark_sequence_stack (ss)
4756 struct sequence_stack *ss;
4758 while (ss)
4760 ggc_mark_rtx (ss->first);
4761 ggc_mark_tree (ss->sequence_rtl_expr);
4762 ss = ss->next;
4766 /* Mark ES for GC. */
4768 void
4769 mark_emit_status (es)
4770 struct emit_status *es;
4772 rtx *r;
4773 tree *t;
4774 int i;
4776 if (es == 0)
4777 return;
4779 for (i = es->regno_pointer_align_length, r = es->x_regno_reg_rtx,
4780 t = es->regno_decl;
4781 i > 0; --i, ++r, ++t)
4783 ggc_mark_rtx (*r);
4784 ggc_mark_tree (*t);
4787 mark_sequence_stack (es->sequence_stack);
4788 ggc_mark_tree (es->sequence_rtl_expr);
4789 ggc_mark_rtx (es->x_first_insn);
4792 /* Generate the vector constant 0 for MODE: a CONST_VECTOR with every element zero. */
4794 static rtx
4795 gen_const_vector_0 (mode)
4796 enum machine_mode mode;
4798 rtx tem;
4799 rtvec v;
4800 int units, i;
4801 enum machine_mode inner;
4803 units = GET_MODE_NUNITS (mode);
4804 inner = GET_MODE_INNER (mode);
4806 v = rtvec_alloc (units);
4808 /* This function must be called only after CONST0_RTX has been set. */
4809 if (!CONST0_RTX (inner))
4810 abort ();
4812 for (i = 0; i < units; ++i)
4813 RTVEC_ELT (v, i) = CONST0_RTX (inner);
4815 tem = gen_rtx_CONST_VECTOR (mode, v);
4816 return tem;
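/* Illustrative result: for a four-element integer vector mode such as
   V4SImode (when the target provides one), the function above yields

     (const_vector:V4SI [ (const_int 0) (const_int 0)
                          (const_int 0) (const_int 0) ])
*/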
4819 /* Create some permanent unique rtl objects shared between all functions.
4820 LINE_NUMBERS is nonzero if line numbers are to be generated. */
4822 void
4823 init_emit_once (line_numbers)
4824 int line_numbers;
4826 int i;
4827 enum machine_mode mode;
4828 enum machine_mode double_mode;
4830 /* Initialize the CONST_INT and memory attribute hash tables. */
4831 const_int_htab = htab_create (37, const_int_htab_hash,
4832 const_int_htab_eq, NULL);
4833 ggc_add_deletable_htab (const_int_htab, 0, 0);
4835 mem_attrs_htab = htab_create (37, mem_attrs_htab_hash,
4836 mem_attrs_htab_eq, NULL);
4837 ggc_add_deletable_htab (mem_attrs_htab, 0, mem_attrs_mark);
4839 no_line_numbers = ! line_numbers;
4841 /* Compute the word and byte modes. */
4843 byte_mode = VOIDmode;
4844 word_mode = VOIDmode;
4845 double_mode = VOIDmode;
4847 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
4848 mode = GET_MODE_WIDER_MODE (mode))
4850 if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
4851 && byte_mode == VOIDmode)
4852 byte_mode = mode;
4854 if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
4855 && word_mode == VOIDmode)
4856 word_mode = mode;
4859 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
4860 mode = GET_MODE_WIDER_MODE (mode))
4862 if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
4863 && double_mode == VOIDmode)
4864 double_mode = mode;
4867 ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
4869 /* Assign register numbers to the globally defined register rtx.
4870 This must be done at runtime because the register number field
4871 is in a union and some compilers can't initialize unions. */
4873 pc_rtx = gen_rtx (PC, VOIDmode);
4874 cc0_rtx = gen_rtx (CC0, VOIDmode);
4875 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
4876 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
4877 if (hard_frame_pointer_rtx == 0)
4878 hard_frame_pointer_rtx = gen_raw_REG (Pmode,
4879 HARD_FRAME_POINTER_REGNUM);
4880 if (arg_pointer_rtx == 0)
4881 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
4882 virtual_incoming_args_rtx =
4883 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
4884 virtual_stack_vars_rtx =
4885 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
4886 virtual_stack_dynamic_rtx =
4887 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
4888 virtual_outgoing_args_rtx =
4889 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
4890 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
4892 /* These rtx must be roots if GC is enabled. */
4893 ggc_add_rtx_root (global_rtl, GR_MAX);
4895 #ifdef INIT_EXPANDERS
4896 /* This is to initialize {init|mark|free}_machine_status before the first
4897 call to push_function_context_to. This is needed by the Chill front
4898 end, which calls push_function_context_to before the first call to
4899 init_function_start. */
4900 INIT_EXPANDERS;
4901 #endif
4903 /* Create the unique rtx's for certain rtx codes and operand values. */
4905 /* Don't use gen_rtx here since gen_rtx in this case
4906 tries to use these variables. */
4907 for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
4908 const_int_rtx[i + MAX_SAVED_CONST_INT] =
4909 gen_rtx_raw_CONST_INT (VOIDmode, i);
4910 ggc_add_rtx_root (const_int_rtx, 2 * MAX_SAVED_CONST_INT + 1);
4912 if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
4913 && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
4914 const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
4915 else
4916 const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
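/* A consequence of the caching above (illustrative): CONST_INTs in the
   saved range are unique, so pointer equality suffices to compare them:

     GEN_INT (0) == const0_rtx    /* always true */
     GEN_INT (1) == const1_rtx    /* always true */
*/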
4918 dconst0 = REAL_VALUE_ATOF ("0", double_mode);
4919 dconst1 = REAL_VALUE_ATOF ("1", double_mode);
4920 dconst2 = REAL_VALUE_ATOF ("2", double_mode);
4921 dconstm1 = REAL_VALUE_ATOF ("-1", double_mode);
4923 for (i = 0; i <= 2; i++)
4925 REAL_VALUE_TYPE *r =
4926 (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
4928 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
4929 mode = GET_MODE_WIDER_MODE (mode))
4931 rtx tem = rtx_alloc (CONST_DOUBLE);
4933 /* Can't use CONST_DOUBLE_FROM_REAL_VALUE here; that uses the
4934 tables we're setting up right now. */
4935 memcpy (&CONST_DOUBLE_LOW (tem), r, sizeof (REAL_VALUE_TYPE));
4936 CONST_DOUBLE_CHAIN (tem) = NULL_RTX;
4937 PUT_MODE (tem, mode);
4939 const_tiny_rtx[i][(int) mode] = tem;
4942 const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
4944 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
4945 mode = GET_MODE_WIDER_MODE (mode))
4946 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
4948 for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT);
4949 mode != VOIDmode;
4950 mode = GET_MODE_WIDER_MODE (mode))
4951 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
4954 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
4955 mode != VOIDmode;
4956 mode = GET_MODE_WIDER_MODE (mode))
4957 const_tiny_rtx[0][(int) mode] = gen_const_vector_0 (mode);
4959 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
4960 mode != VOIDmode;
4961 mode = GET_MODE_WIDER_MODE (mode))
4962 const_tiny_rtx[0][(int) mode] = gen_const_vector_0 (mode);
4964 for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
4965 if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
4966 const_tiny_rtx[0][i] = const0_rtx;
4968 const_tiny_rtx[0][(int) BImode] = const0_rtx;
4969 if (STORE_FLAG_VALUE == 1)
4970 const_tiny_rtx[1][(int) BImode] = const1_rtx;
4972 /* For bounded pointers, `&const_tiny_rtx[0][0]' is not the same as
4973 `(rtx *) const_tiny_rtx'. The former has bounds that only cover
4974 `const_tiny_rtx[0]', whereas the latter has bounds that cover all. */
4975 ggc_add_rtx_root ((rtx *) const_tiny_rtx, sizeof const_tiny_rtx / sizeof (rtx));
4976 ggc_add_rtx_root (&const_true_rtx, 1);
4978 #ifdef RETURN_ADDRESS_POINTER_REGNUM
4979 return_address_pointer_rtx
4980 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
4981 #endif
4983 #ifdef STRUCT_VALUE
4984 struct_value_rtx = STRUCT_VALUE;
4985 #else
4986 struct_value_rtx = gen_rtx_REG (Pmode, STRUCT_VALUE_REGNUM);
4987 #endif
4989 #ifdef STRUCT_VALUE_INCOMING
4990 struct_value_incoming_rtx = STRUCT_VALUE_INCOMING;
4991 #else
4992 #ifdef STRUCT_VALUE_INCOMING_REGNUM
4993 struct_value_incoming_rtx
4994 = gen_rtx_REG (Pmode, STRUCT_VALUE_INCOMING_REGNUM);
4995 #else
4996 struct_value_incoming_rtx = struct_value_rtx;
4997 #endif
4998 #endif
5000 #ifdef STATIC_CHAIN_REGNUM
5001 static_chain_rtx = gen_rtx_REG (Pmode, STATIC_CHAIN_REGNUM);
5003 #ifdef STATIC_CHAIN_INCOMING_REGNUM
5004 if (STATIC_CHAIN_INCOMING_REGNUM != STATIC_CHAIN_REGNUM)
5005 static_chain_incoming_rtx
5006 = gen_rtx_REG (Pmode, STATIC_CHAIN_INCOMING_REGNUM);
5007 else
5008 #endif
5009 static_chain_incoming_rtx = static_chain_rtx;
5010 #endif
5012 #ifdef STATIC_CHAIN
5013 static_chain_rtx = STATIC_CHAIN;
5015 #ifdef STATIC_CHAIN_INCOMING
5016 static_chain_incoming_rtx = STATIC_CHAIN_INCOMING;
5017 #else
5018 static_chain_incoming_rtx = static_chain_rtx;
5019 #endif
5020 #endif
5022 if (PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
5023 pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
5025 ggc_add_rtx_root (&pic_offset_table_rtx, 1);
5026 ggc_add_rtx_root (&struct_value_rtx, 1);
5027 ggc_add_rtx_root (&struct_value_incoming_rtx, 1);
5028 ggc_add_rtx_root (&static_chain_rtx, 1);
5029 ggc_add_rtx_root (&static_chain_incoming_rtx, 1);
5030 ggc_add_rtx_root (&return_address_pointer_rtx, 1);
5033 /* Query and clear/restore no_line_numbers. This is used by the
5034 switch/case handling in stmt.c to give proper line numbers in
5035 warnings about unreachable code. */
5037 int
5038 force_line_numbers ()
5040 int old = no_line_numbers;
5042 no_line_numbers = 0;
5043 if (old)
5044 force_next_line_note ();
5045 return old;
5048 void
5049 restore_line_number_status (old_value)
5050 int old_value;
5052 no_line_numbers = old_value;
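/* Illustrative pairing, as used by the switch handling in stmt.c:

     int old = force_line_numbers ();
     ... emit code that should carry line-number notes ...
     restore_line_number_status (old);
*/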