/* Emit RTL for the GNU C-Compiler expander.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/* Middle-to-low level generation of rtx code and insns.

   This file contains the functions `gen_rtx', `gen_reg_rtx'
   and `gen_label_rtx' that are the usual ways of creating rtl
   expressions for most purposes.

   It also has the functions for creating insns and linking
   them in the doubly-linked chain.

   The patterns of the insns are created by machine-dependent
   routines in insn-emit.c, which is generated automatically from
   the machine description.  These routines use `gen_rtx' to make
   the individual rtx's of the pattern; what is machine dependent
   is the kind of rtx's they make and what arguments they use.  */
#include "config.h"
#include "system.h"
#include "toplev.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "hashtab.h"
#include "insn-config.h"
#include "recog.h"
#include "real.h"
#include "obstack.h"
#include "bitmap.h"
#include "basic-block.h"
#include "ggc.h"
#include "debug.h"
#include "langhooks.h"
/* Commonly used modes.  */

enum machine_mode byte_mode;	/* Mode whose width is BITS_PER_UNIT.  */
enum machine_mode word_mode;	/* Mode whose width is BITS_PER_WORD.  */
enum machine_mode double_mode;	/* Mode whose width is DOUBLE_TYPE_SIZE.  */
enum machine_mode ptr_mode;	/* Mode whose width is POINTER_SIZE.  */

/* This is *not* reset after each function.  It gives each CODE_LABEL
   in the entire compilation a unique label number.  */

static int label_num = 1;

/* Highest label number in current function.
   Zero means use the value of label_num instead.
   This is nonzero only when belatedly compiling an inline function.  */

static int last_label_num;

/* Value label_num had when set_new_first_and_last_label_number was called.
   If label_num has not changed since then, last_label_num is valid.  */

static int base_label_num;

/* Nonzero means do not generate NOTEs for source line numbers.  */

static int no_line_numbers;
/* Commonly used rtx's, so that we only need space for one copy.
   These are initialized once for the entire compilation.
   All of these except perhaps the floating-point CONST_DOUBLEs
   are unique; no other rtx-object will be equal to any of these.  */

rtx global_rtl[GR_MAX];

/* We record floating-point CONST_DOUBLEs in each floating-point mode for
   the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
   record a copy of const[012]_rtx.  */

rtx const_tiny_rtx[3][(int) MAX_MACHINE_MODE];

rtx const_true_rtx;

REAL_VALUE_TYPE dconst0;
REAL_VALUE_TYPE dconst1;
REAL_VALUE_TYPE dconst2;
REAL_VALUE_TYPE dconstm1;

/* All references to the following fixed hard registers go through
   these unique rtl objects.  On machines where the frame-pointer and
   arg-pointer are the same register, they use the same unique object.

   After register allocation, other rtl objects which used to be pseudo-regs
   may be clobbered to refer to the frame-pointer register.
   But references that were originally to the frame-pointer can be
   distinguished from the others because they contain frame_pointer_rtx.

   When to use frame_pointer_rtx and hard_frame_pointer_rtx is a little
   tricky: until register elimination has taken place hard_frame_pointer_rtx
   should be used if it is being set, and frame_pointer_rtx otherwise.  After
   register elimination hard_frame_pointer_rtx should always be used.
   On machines where the two registers are the same (as on most machines),
   these are the same rtx.

   In an inline procedure, the stack and frame pointer rtxs may not be
   used for anything else.  */

rtx struct_value_rtx;		/* (REG:Pmode STRUCT_VALUE_REGNUM) */
rtx struct_value_incoming_rtx;	/* (REG:Pmode STRUCT_VALUE_INCOMING_REGNUM) */
rtx static_chain_rtx;		/* (REG:Pmode STATIC_CHAIN_REGNUM) */
rtx static_chain_incoming_rtx;	/* (REG:Pmode STATIC_CHAIN_INCOMING_REGNUM) */
rtx pic_offset_table_rtx;	/* (REG:Pmode PIC_OFFSET_TABLE_REGNUM) */

/* This is used to implement __builtin_return_address for some machines.
   See for instance the MIPS port.  */

rtx return_address_pointer_rtx;	/* (REG:Pmode RETURN_ADDRESS_POINTER_REGNUM) */

/* We make one copy of (const_int C) where C is in
   [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
   to save space during the compilation and simplify comparisons of
   integers.  */

rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];

/* A hash table storing CONST_INTs whose absolute value is greater
   than MAX_SAVED_CONST_INT.  */

static htab_t const_int_htab;

/* A hash table storing memory attribute structures.  */
static htab_t mem_attrs_htab;
/* start_sequence and gen_sequence can make a lot of rtx expressions which are
   shortly thrown away.  We use two mechanisms to prevent this waste:

   For sizes up to 5 elements, we keep a SEQUENCE and its associated
   rtvec for use by gen_sequence.  One entry for each size is
   sufficient because most cases are calls to gen_sequence followed by
   immediately emitting the SEQUENCE.  Reuse is safe since emitting a
   sequence is destructive on the insn in it anyway and hence can't be
   redone.

   We do not bother to save this cached data over nested function calls.
   Instead, we just reinitialize them.  */

#define SEQUENCE_RESULT_SIZE 5

static rtx sequence_result[SEQUENCE_RESULT_SIZE];

/* During RTL generation, we also keep a list of free INSN rtl codes.  */
static rtx free_insn;

#define first_insn (cfun->emit->x_first_insn)
#define last_insn (cfun->emit->x_last_insn)
#define cur_insn_uid (cfun->emit->x_cur_insn_uid)
#define last_linenum (cfun->emit->x_last_linenum)
#define last_filename (cfun->emit->x_last_filename)
#define first_label_num (cfun->emit->x_first_label_num)
static rtx make_jump_insn_raw		PARAMS ((rtx));
static rtx make_call_insn_raw		PARAMS ((rtx));
static rtx find_line_note		PARAMS ((rtx));
static void mark_sequence_stack		PARAMS ((struct sequence_stack *));
static rtx change_address_1		PARAMS ((rtx, enum machine_mode, rtx,
						 int));
static void unshare_all_rtl_1		PARAMS ((rtx));
static void unshare_all_decls		PARAMS ((tree));
static void reset_used_decls		PARAMS ((tree));
static void mark_label_nuses		PARAMS ((rtx));
static hashval_t const_int_htab_hash	PARAMS ((const void *));
static int const_int_htab_eq		PARAMS ((const void *,
						 const void *));
static hashval_t mem_attrs_htab_hash	PARAMS ((const void *));
static int mem_attrs_htab_eq		PARAMS ((const void *,
						 const void *));
static void mem_attrs_mark		PARAMS ((const void *));
static mem_attrs *get_mem_attrs		PARAMS ((HOST_WIDE_INT, tree, rtx,
						 rtx, unsigned int,
						 enum machine_mode));
/* Probability of the conditional branch currently being processed by
   try_split.  Set to -1 otherwise.  */
int split_branch_probability = -1;
203 /* Returns a hash code for X (which is a really a CONST_INT). */
205 static hashval_t
206 const_int_htab_hash (x)
207 const void *x;
209 return (hashval_t) INTVAL ((const struct rtx_def *) x);
/* Returns non-zero if the value represented by X (which is really a
   CONST_INT) is the same as that given by Y (which is really a
   HOST_WIDE_INT *).  */

static int
const_int_htab_eq (x, y)
     const void *x;
     const void *y;
{
  return (INTVAL ((const struct rtx_def *) x) == *((const HOST_WIDE_INT *) y));
}
/* Returns a hash code for X (which is really a mem_attrs *).  */

static hashval_t
mem_attrs_htab_hash (x)
     const void *x;
{
  mem_attrs *p = (mem_attrs *) x;

  return (p->alias ^ (p->align * 1000)
	  ^ ((p->offset ? INTVAL (p->offset) : 0) * 50000)
	  ^ ((p->size ? INTVAL (p->size) : 0) * 2500000)
	  ^ (long) p->decl);
}
/* Returns non-zero if the value represented by X (which is really a
   mem_attrs *) is the same as that given by Y (which is also really a
   mem_attrs *).  */

static int
mem_attrs_htab_eq (x, y)
     const void *x;
     const void *y;
{
  mem_attrs *p = (mem_attrs *) x;
  mem_attrs *q = (mem_attrs *) y;

  return (p->alias == q->alias && p->decl == q->decl && p->offset == q->offset
	  && p->size == q->size && p->align == q->align);
}
/* This routine is called when we determine that we need a mem_attrs entry.
   It marks the associated decl and RTL as being used, if present.  */

static void
mem_attrs_mark (x)
     const void *x;
{
  mem_attrs *p = (mem_attrs *) x;

  if (p->decl)
    ggc_mark_tree (p->decl);

  if (p->offset)
    ggc_mark_rtx (p->offset);

  if (p->size)
    ggc_mark_rtx (p->size);
}
/* Allocate a new mem_attrs structure and insert it into the hash table if
   one identical to it is not already in the table.  We are doing this for
   MEM of mode MODE.  */

static mem_attrs *
get_mem_attrs (alias, decl, offset, size, align, mode)
     HOST_WIDE_INT alias;
     tree decl;
     rtx offset;
     rtx size;
     unsigned int align;
     enum machine_mode mode;
{
  mem_attrs attrs;
  void **slot;

  /* If everything is the default, we can just return zero.  */
  if (alias == 0 && decl == 0 && offset == 0
      && (size == 0
	  || (mode != BLKmode && GET_MODE_SIZE (mode) == INTVAL (size)))
      && (align == BITS_PER_UNIT
	  || (mode != BLKmode && align == GET_MODE_ALIGNMENT (mode))))
    return 0;

  attrs.alias = alias;
  attrs.decl = decl;
  attrs.offset = offset;
  attrs.size = size;
  attrs.align = align;

  slot = htab_find_slot (mem_attrs_htab, &attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc (sizeof (mem_attrs));
      memcpy (*slot, &attrs, sizeof (mem_attrs));
    }

  return *slot;
}
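
/* Illustrative example (added commentary, not part of the original
   source): a fully-default attribute set hashes to nothing at all.
   For a plain SImode MEM with alias set 0, no decl, no offset, the
   natural size, and the natural alignment (32 bits on a typical
   32-bit target), the call

     get_mem_attrs (0, NULL_TREE, NULL_RTX, GEN_INT (4), 32, SImode)

   returns 0, so such MEMs carry no mem_attrs structure at all.  */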
/* Generate a new REG rtx.  Make sure ORIGINAL_REGNO is set properly, and
   don't attempt to share with the various global pieces of rtl (such as
   frame_pointer_rtx).  */

rtx
gen_raw_REG (mode, regno)
     enum machine_mode mode;
     int regno;
{
  rtx x = gen_rtx_raw_REG (mode, regno);
  ORIGINAL_REGNO (x) = regno;
  return x;
}
/* There are some RTL codes that require special attention; the generation
   functions do the raw handling.  If you add to this list, modify
   special_rtx in gengenrtl.c as well.  */

rtx
gen_rtx_CONST_INT (mode, arg)
     enum machine_mode mode ATTRIBUTE_UNUSED;
     HOST_WIDE_INT arg;
{
  void **slot;

  if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
    return const_int_rtx[arg + MAX_SAVED_CONST_INT];

#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
  if (const_true_rtx && arg == STORE_FLAG_VALUE)
    return const_true_rtx;
#endif

  /* Look up the CONST_INT in the hash table.  */
  slot = htab_find_slot_with_hash (const_int_htab, &arg,
				   (hashval_t) arg, INSERT);
  if (*slot == 0)
    *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);

  return (rtx) *slot;
}
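
/* Illustrative example (added commentary, not part of the original
   source): because of the sharing above, CONST_INTs may be compared
   with pointer equality.

     rtx a = GEN_INT (4);
     rtx b = gen_rtx_CONST_INT (VOIDmode, 4);

   Here a == b, and GEN_INT (0) == const0_rtx, which is why code all
   over the compiler tests CONST_INTs with == rather than
   rtx_equal_p.  */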
/* CONST_DOUBLEs need special handling because their length is known
   only at run-time.  */

rtx
gen_rtx_CONST_DOUBLE (mode, arg0, arg1)
     enum machine_mode mode;
     HOST_WIDE_INT arg0, arg1;
{
  rtx r = rtx_alloc (CONST_DOUBLE);
  int i;

  PUT_MODE (r, mode);
  X0EXP (r, 0) = NULL_RTX;
  XWINT (r, 1) = arg0;
  XWINT (r, 2) = arg1;

  for (i = GET_RTX_LENGTH (CONST_DOUBLE) - 1; i > 2; --i)
    XWINT (r, i) = 0;

  return r;
}
rtx
gen_rtx_REG (mode, regno)
     enum machine_mode mode;
     int regno;
{
  /* In case the MD file explicitly references the frame pointer, have
     all such references point to the same frame pointer.  This is
     used during frame pointer elimination to distinguish the explicit
     references to these registers from pseudos that happened to be
     assigned to them.

     If we have eliminated the frame pointer or arg pointer, we will
     be using it as a normal register, for example as a spill
     register.  In such cases, we might be accessing it in a mode that
     is not Pmode and therefore cannot use the pre-allocated rtx.

     Also don't do this when we are making new REGs in reload, since
     we don't want to get confused with the real pointers.  */

  if (mode == Pmode && !reload_in_progress)
    {
      if (regno == FRAME_POINTER_REGNUM)
	return frame_pointer_rtx;
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
      if (regno == HARD_FRAME_POINTER_REGNUM)
	return hard_frame_pointer_rtx;
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && HARD_FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
      if (regno == ARG_POINTER_REGNUM)
	return arg_pointer_rtx;
#endif
#ifdef RETURN_ADDRESS_POINTER_REGNUM
      if (regno == RETURN_ADDRESS_POINTER_REGNUM)
	return return_address_pointer_rtx;
#endif
      if (regno == STACK_POINTER_REGNUM)
	return stack_pointer_rtx;
    }

  return gen_raw_REG (mode, regno);
}
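
/* Illustrative example (added commentary, not part of the original
   source): asking for the frame pointer in Pmode outside of reload
   hands back the unique shared object,

     rtx fp = gen_rtx_REG (Pmode, FRAME_POINTER_REGNUM);

   so fp == frame_pointer_rtx here, and the elimination pass can spot
   explicit frame-pointer references by pointer comparison.  A request
   in some other mode, or during reload, yields a fresh REG instead.  */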
rtx
gen_rtx_MEM (mode, addr)
     enum machine_mode mode;
     rtx addr;
{
  rtx rt = gen_rtx_raw_MEM (mode, addr);

  /* This field is not cleared by the mere allocation of the rtx, so
     we clear it here.  */
  MEM_ATTRS (rt) = 0;

  return rt;
}
rtx
gen_rtx_SUBREG (mode, reg, offset)
     enum machine_mode mode;
     rtx reg;
     int offset;
{
  /* This is the most common failure type.
     Catch it early so we can see who does it.  */
  if ((offset % GET_MODE_SIZE (mode)) != 0)
    abort ();

  /* This check isn't usable right now because combine will
     throw arbitrary crap like a CALL into a SUBREG in
     gen_lowpart_for_combine so we must just eat it.  */
#if 0
  /* Check for this too.  */
  if (offset >= GET_MODE_SIZE (GET_MODE (reg)))
    abort ();
#endif
  return gen_rtx_fmt_ei (SUBREG, mode, reg, offset);
}
/* Generate a SUBREG representing the least-significant part of REG if MODE
   is smaller than mode of REG, otherwise paradoxical SUBREG.  */

rtx
gen_lowpart_SUBREG (mode, reg)
     enum machine_mode mode;
     rtx reg;
{
  enum machine_mode inmode;

  inmode = GET_MODE (reg);
  if (inmode == VOIDmode)
    inmode = mode;
  return gen_rtx_SUBREG (mode, reg,
			 subreg_lowpart_offset (mode, inmode));
}
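
/* Illustrative example (added commentary, not part of the original
   source): the low SImode part of a DImode pseudo,

     rtx di = gen_reg_rtx (DImode);
     rtx lo = gen_lowpart_SUBREG (SImode, di);

   is (subreg:SI (reg:DI N) 0) on a little-endian 32-bit target and
   (subreg:SI (reg:DI N) 4) on a big-endian one; asking instead for a
   mode wider than DImode would produce a paradoxical SUBREG with
   offset 0.  */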
/* rtx gen_rtx (code, mode, [element1, ..., elementn])
**
**	    This routine generates an RTX of the size specified by
**	    <code>, which is an RTX code.  The RTX structure is initialized
**	    from the arguments <element1> through <elementn>, which are
**	    interpreted according to the specific RTX type's format.  The
**	    special machine mode associated with the rtx (if any) is specified
**	    in <mode>.
**
**	    gen_rtx can be invoked in a way which resembles the lisp-like
**	    rtx it will generate.  For example, the following rtx structure:
**
**	      (plus:QI (mem:QI (reg:SI 1))
**		       (mem:QI (plus:SI (reg:SI 2) (reg:SI 3))))
**
**	    ...would be generated by the following C code:
**
**	    gen_rtx (PLUS, QImode,
**		gen_rtx (MEM, QImode,
**		    gen_rtx (REG, SImode, 1)),
**		gen_rtx (MEM, QImode,
**		    gen_rtx (PLUS, SImode,
**			gen_rtx (REG, SImode, 2),
**			gen_rtx (REG, SImode, 3)))),
*/
/*VARARGS2*/
rtx
gen_rtx VPARAMS ((enum rtx_code code, enum machine_mode mode, ...))
{
  int i;		/* Array indices...			*/
  const char *fmt;	/* Current rtx's format...		*/
  rtx rt_val;		/* RTX to return to caller...		*/

  VA_OPEN (p, mode);
  VA_FIXEDARG (p, enum rtx_code, code);
  VA_FIXEDARG (p, enum machine_mode, mode);

  switch (code)
    {
    case CONST_INT:
      rt_val = gen_rtx_CONST_INT (mode, va_arg (p, HOST_WIDE_INT));
      break;

    case CONST_DOUBLE:
      {
	HOST_WIDE_INT arg0 = va_arg (p, HOST_WIDE_INT);
	HOST_WIDE_INT arg1 = va_arg (p, HOST_WIDE_INT);

	rt_val = gen_rtx_CONST_DOUBLE (mode, arg0, arg1);
      }
      break;

    case REG:
      rt_val = gen_rtx_REG (mode, va_arg (p, int));
      break;

    case MEM:
      rt_val = gen_rtx_MEM (mode, va_arg (p, rtx));
      break;

    default:
      rt_val = rtx_alloc (code);	/* Allocate the storage space.  */
      rt_val->mode = mode;		/* Store the machine mode...  */

      fmt = GET_RTX_FORMAT (code);	/* Find the right format...  */
      for (i = 0; i < GET_RTX_LENGTH (code); i++)
	{
	  switch (*fmt++)
	    {
	    case '0':		/* Unused field.  */
	      break;

	    case 'i':		/* An integer?  */
	      XINT (rt_val, i) = va_arg (p, int);
	      break;

	    case 'w':		/* A wide integer?  */
	      XWINT (rt_val, i) = va_arg (p, HOST_WIDE_INT);
	      break;

	    case 's':		/* A string?  */
	      XSTR (rt_val, i) = va_arg (p, char *);
	      break;

	    case 'e':		/* An expression?  */
	    case 'u':		/* An insn?  Same except when printing.  */
	      XEXP (rt_val, i) = va_arg (p, rtx);
	      break;

	    case 'E':		/* An RTX vector?  */
	      XVEC (rt_val, i) = va_arg (p, rtvec);
	      break;

	    case 'b':		/* A bitmap?  */
	      XBITMAP (rt_val, i) = va_arg (p, bitmap);
	      break;

	    case 't':		/* A tree?  */
	      XTREE (rt_val, i) = va_arg (p, tree);
	      break;

	    default:
	      abort ();
	    }
	}
      break;
    }

  VA_CLOSE (p);
  return rt_val;
}
/* gen_rtvec (n, [rt1, ..., rtn])
**
**	    This routine creates an rtvec and stores within it the
**	pointers to rtx's which are its arguments.
*/

/*VARARGS1*/
rtvec
gen_rtvec VPARAMS ((int n, ...))
{
  int i, save_n;
  rtx *vector;

  VA_OPEN (p, n);
  VA_FIXEDARG (p, int, n);

  if (n == 0)
    return NULL_RTVEC;		/* Don't allocate an empty rtvec...	*/

  vector = (rtx *) alloca (n * sizeof (rtx));

  for (i = 0; i < n; i++)
    vector[i] = va_arg (p, rtx);

  /* The definition of VA_* in K&R C causes `n' to go out of scope.  */
  save_n = n;
  VA_CLOSE (p);

  return gen_rtvec_v (save_n, vector);
}
rtvec
gen_rtvec_v (n, argp)
     int n;
     rtx *argp;
{
  int i;
  rtvec rt_val;

  if (n == 0)
    return NULL_RTVEC;		/* Don't allocate an empty rtvec...	*/

  rt_val = rtvec_alloc (n);	/* Allocate an rtvec...			*/

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}
/* Generate a REG rtx for a new pseudo register of mode MODE.
   This pseudo is assigned the next sequential register number.  */

rtx
gen_reg_rtx (mode)
     enum machine_mode mode;
{
  struct function *f = cfun;
  rtx val;

  /* Don't let anything called after initial flow analysis create new
     registers.  */
  if (no_new_pseudos)
    abort ();

  if (generating_concat_p
      && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
    {
      /* For complex modes, don't make a single pseudo.
	 Instead, make a CONCAT of two pseudos.
	 This allows noncontiguous allocation of the real and imaginary parts,
	 which makes much better code.  Besides, allocating DCmode
	 pseudos overstrains reload on some machines like the 386.  */
      rtx realpart, imagpart;
      int size = GET_MODE_UNIT_SIZE (mode);
      enum machine_mode partmode
	= mode_for_size (size * BITS_PER_UNIT,
			 (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
			  ? MODE_FLOAT : MODE_INT),
			 0);

      realpart = gen_reg_rtx (partmode);
      imagpart = gen_reg_rtx (partmode);
      return gen_rtx_CONCAT (mode, realpart, imagpart);
    }

  /* Make sure regno_pointer_align, regno_decl, and regno_reg_rtx are large
     enough to have an element for this pseudo reg number.  */

  if (reg_rtx_no == f->emit->regno_pointer_align_length)
    {
      int old_size = f->emit->regno_pointer_align_length;
      char *new;
      rtx *new1;
      tree *new2;

      new = xrealloc (f->emit->regno_pointer_align, old_size * 2);
      memset (new + old_size, 0, old_size);
      f->emit->regno_pointer_align = (unsigned char *) new;

      new1 = (rtx *) xrealloc (f->emit->x_regno_reg_rtx,
			       old_size * 2 * sizeof (rtx));
      memset (new1 + old_size, 0, old_size * sizeof (rtx));
      regno_reg_rtx = new1;

      new2 = (tree *) xrealloc (f->emit->regno_decl,
				old_size * 2 * sizeof (tree));
      memset (new2 + old_size, 0, old_size * sizeof (tree));
      f->emit->regno_decl = new2;

      f->emit->regno_pointer_align_length = old_size * 2;
    }

  val = gen_raw_REG (mode, reg_rtx_no);
  regno_reg_rtx[reg_rtx_no++] = val;
  return val;
}
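
/* Illustrative example (added commentary, not part of the original
   source): with generating_concat_p set, a complex pseudo is a pair,
   e.g.

     rtx c = gen_reg_rtx (DCmode);

   gives (concat:DC (reg:DF N) (reg:DF N+1)), so the real and
   imaginary parts can be allocated to noncontiguous hard regs.  */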
/* Identify REG (which may be a CONCAT) as a user register.  */

void
mark_user_reg (reg)
     rtx reg;
{
  if (GET_CODE (reg) == CONCAT)
    {
      REG_USERVAR_P (XEXP (reg, 0)) = 1;
      REG_USERVAR_P (XEXP (reg, 1)) = 1;
    }
  else if (GET_CODE (reg) == REG)
    REG_USERVAR_P (reg) = 1;
  else
    abort ();
}
/* Identify REG as a probable pointer register and show its alignment
   as ALIGN, if nonzero.  */

void
mark_reg_pointer (reg, align)
     rtx reg;
     int align;
{
  if (! REG_POINTER (reg))
    {
      REG_POINTER (reg) = 1;

      if (align)
	REGNO_POINTER_ALIGN (REGNO (reg)) = align;
    }
  else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
    /* We can no longer be sure just how aligned this pointer is.  */
    REGNO_POINTER_ALIGN (REGNO (reg)) = align;
}
/* Return 1 plus largest pseudo reg number used in the current function.  */

int
max_reg_num ()
{
  return reg_rtx_no;
}

/* Return 1 + the largest label number used so far in the current function.  */

int
max_label_num ()
{
  if (last_label_num && label_num == base_label_num)
    return last_label_num;
  return label_num;
}

/* Return first label number used in this function (if any were used).  */

int
get_first_label_num ()
{
  return first_label_num;
}
/* Return the final regno of X, which is a SUBREG of a hard
   register.  */

unsigned int
subreg_hard_regno (x, check_mode)
     rtx x;
     int check_mode;
{
  enum machine_mode mode = GET_MODE (x);
  unsigned int byte_offset, base_regno, final_regno;
  rtx reg = SUBREG_REG (x);

  /* This is where we attempt to catch illegal subregs
     created by the compiler.  */
  if (GET_CODE (x) != SUBREG
      || GET_CODE (reg) != REG)
    abort ();
  base_regno = REGNO (reg);
  if (base_regno >= FIRST_PSEUDO_REGISTER)
    abort ();
  if (check_mode && ! HARD_REGNO_MODE_OK (base_regno, GET_MODE (reg)))
    abort ();

  /* Catch non-congruent offsets too.  */
  byte_offset = SUBREG_BYTE (x);
  if ((byte_offset % GET_MODE_SIZE (mode)) != 0)
    abort ();

  final_regno = subreg_regno (x);

  return final_regno;
}
/* Return a value representing some low-order bits of X, where the number
   of low-order bits is given by MODE.  Note that no conversion is done
   between floating-point and fixed-point values, rather, the bit
   representation is returned.

   This function handles the cases in common between gen_lowpart, below,
   and two variants in cse.c and combine.c.  These are the cases that can
   be safely handled at all points in the compilation.

   If this is not a case we can handle, return 0.  */

rtx
gen_lowpart_common (mode, x)
     enum machine_mode mode;
     rtx x;
{
  int msize = GET_MODE_SIZE (mode);
  int xsize = GET_MODE_SIZE (GET_MODE (x));
  int offset = 0;

  if (GET_MODE (x) == mode)
    return x;

  /* MODE must occupy no more words than the mode of X.  */
  if (GET_MODE (x) != VOIDmode
      && ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
	  > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
    return 0;

  offset = subreg_lowpart_offset (mode, GET_MODE (x));

  if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
      && (GET_MODE_CLASS (mode) == MODE_INT
	  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
    {
      /* If we are getting the low-order part of something that has been
	 sign- or zero-extended, we can either just use the object being
	 extended or make a narrower extension.  If we want an even smaller
	 piece than the size of the object being extended, call ourselves
	 recursively.

	 This case is used mostly by combine and cse.  */

      if (GET_MODE (XEXP (x, 0)) == mode)
	return XEXP (x, 0);
      else if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
	return gen_lowpart_common (mode, XEXP (x, 0));
      else if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (x)))
	return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
    }
  else if (GET_CODE (x) == SUBREG || GET_CODE (x) == REG
	   || GET_CODE (x) == CONCAT)
    return simplify_gen_subreg (mode, x, GET_MODE (x), offset);
  /* If X is a CONST_INT or a CONST_DOUBLE, extract the appropriate bits
     from the low-order part of the constant.  */
  else if ((GET_MODE_CLASS (mode) == MODE_INT
	    || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
	   && GET_MODE (x) == VOIDmode
	   && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
    {
      /* If MODE is twice the host word size, X is already the desired
	 representation.  Otherwise, if MODE is wider than a word, we can't
	 do this.  If MODE is exactly a word, return just one CONST_INT.  */

      if (GET_MODE_BITSIZE (mode) >= 2 * HOST_BITS_PER_WIDE_INT)
	return x;
      else if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
	return 0;
      else if (GET_MODE_BITSIZE (mode) == HOST_BITS_PER_WIDE_INT)
	return (GET_CODE (x) == CONST_INT ? x
		: GEN_INT (CONST_DOUBLE_LOW (x)));
      else
	{
	  /* MODE must be narrower than HOST_BITS_PER_WIDE_INT.  */
	  HOST_WIDE_INT val = (GET_CODE (x) == CONST_INT ? INTVAL (x)
			       : CONST_DOUBLE_LOW (x));

	  /* Sign extend to HOST_WIDE_INT.  */
	  val = trunc_int_for_mode (val, mode);

	  return (GET_CODE (x) == CONST_INT && INTVAL (x) == val ? x
		  : GEN_INT (val));
	}
    }
#ifndef REAL_ARITHMETIC
  /* If X is an integral constant but we want it in floating-point, it
     must be the case that we have a union of an integer and a floating-point
     value.  If the machine-parameters allow it, simulate that union here
     and return the result.  The two-word and single-word cases are
     different.  */

  else if (((HOST_FLOAT_FORMAT == TARGET_FLOAT_FORMAT
	     && HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
	    || flag_pretend_float)
	   && GET_MODE_CLASS (mode) == MODE_FLOAT
	   && GET_MODE_SIZE (mode) == UNITS_PER_WORD
	   && GET_CODE (x) == CONST_INT
	   && sizeof (float) * HOST_BITS_PER_CHAR == HOST_BITS_PER_WIDE_INT)
    {
      union {HOST_WIDE_INT i; float d; } u;

      u.i = INTVAL (x);
      return CONST_DOUBLE_FROM_REAL_VALUE (u.d, mode);
    }
  else if (((HOST_FLOAT_FORMAT == TARGET_FLOAT_FORMAT
	     && HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
	    || flag_pretend_float)
	   && GET_MODE_CLASS (mode) == MODE_FLOAT
	   && GET_MODE_SIZE (mode) == 2 * UNITS_PER_WORD
	   && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE)
	   && GET_MODE (x) == VOIDmode
	   && (sizeof (double) * HOST_BITS_PER_CHAR
	       == 2 * HOST_BITS_PER_WIDE_INT))
    {
      union {HOST_WIDE_INT i[2]; double d; } u;
      HOST_WIDE_INT low, high;

      if (GET_CODE (x) == CONST_INT)
	low = INTVAL (x), high = low >> (HOST_BITS_PER_WIDE_INT - 1);
      else
	low = CONST_DOUBLE_LOW (x), high = CONST_DOUBLE_HIGH (x);

#ifdef HOST_WORDS_BIG_ENDIAN
      u.i[0] = high, u.i[1] = low;
#else
      u.i[0] = low, u.i[1] = high;
#endif

      return CONST_DOUBLE_FROM_REAL_VALUE (u.d, mode);
    }

  /* Similarly, if this is converting a floating-point value into a
     single-word integer.  Only do this if the host and target parameters are
     compatible.  */

  else if (((HOST_FLOAT_FORMAT == TARGET_FLOAT_FORMAT
	     && HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
	    || flag_pretend_float)
	   && (GET_MODE_CLASS (mode) == MODE_INT
	       || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
	   && GET_CODE (x) == CONST_DOUBLE
	   && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
	   && GET_MODE_BITSIZE (mode) == BITS_PER_WORD)
    return constant_subword (x, (offset / UNITS_PER_WORD), GET_MODE (x));

  /* Similarly, if this is converting a floating-point value into a
     two-word integer, we can do this one word at a time and make an
     integer.  Only do this if the host and target parameters are
     compatible.  */

  else if (((HOST_FLOAT_FORMAT == TARGET_FLOAT_FORMAT
	     && HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
	    || flag_pretend_float)
	   && (GET_MODE_CLASS (mode) == MODE_INT
	       || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
	   && GET_CODE (x) == CONST_DOUBLE
	   && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
	   && GET_MODE_BITSIZE (mode) == 2 * BITS_PER_WORD)
    {
      rtx lowpart, highpart;

      lowpart = constant_subword (x,
				  (offset / UNITS_PER_WORD) + WORDS_BIG_ENDIAN,
				  GET_MODE (x));
      highpart = constant_subword (x,
				   (offset / UNITS_PER_WORD) + (! WORDS_BIG_ENDIAN),
				   GET_MODE (x));
      if (lowpart && GET_CODE (lowpart) == CONST_INT
	  && highpart && GET_CODE (highpart) == CONST_INT)
	return immed_double_const (INTVAL (lowpart), INTVAL (highpart), mode);
    }
#else /* ifndef REAL_ARITHMETIC */

  /* When we have a FP emulator, we can handle all conversions between
     FP and integer operands.  This simplifies reload because it
     doesn't have to deal with constructs like (subreg:DI
     (const_double:SF ...)) or (subreg:DF (const_int ...)).  */
  /* Single-precision floats are always 32-bits and double-precision
     floats are always 64-bits.  */

  else if (GET_MODE_CLASS (mode) == MODE_FLOAT
	   && GET_MODE_BITSIZE (mode) == 32
	   && GET_CODE (x) == CONST_INT)
    {
      REAL_VALUE_TYPE r;
      HOST_WIDE_INT i;

      i = INTVAL (x);
      r = REAL_VALUE_FROM_TARGET_SINGLE (i);
      return CONST_DOUBLE_FROM_REAL_VALUE (r, mode);
    }
  else if (GET_MODE_CLASS (mode) == MODE_FLOAT
	   && GET_MODE_BITSIZE (mode) == 64
	   && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE)
	   && GET_MODE (x) == VOIDmode)
    {
      REAL_VALUE_TYPE r;
      HOST_WIDE_INT i[2];
      HOST_WIDE_INT low, high;

      if (GET_CODE (x) == CONST_INT)
	{
	  low = INTVAL (x);
	  high = low >> (HOST_BITS_PER_WIDE_INT - 1);
	}
      else
	{
	  low = CONST_DOUBLE_LOW (x);
	  high = CONST_DOUBLE_HIGH (x);
	}

      /* REAL_VALUE_TARGET_DOUBLE takes the addressing order of the
	 target machine.  */
      if (WORDS_BIG_ENDIAN)
	i[0] = high, i[1] = low;
      else
	i[0] = low, i[1] = high;

      r = REAL_VALUE_FROM_TARGET_DOUBLE (i);
      return CONST_DOUBLE_FROM_REAL_VALUE (r, mode);
    }
  else if ((GET_MODE_CLASS (mode) == MODE_INT
	    || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
	   && GET_CODE (x) == CONST_DOUBLE
	   && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
    {
      REAL_VALUE_TYPE r;
      long i[4];  /* Only the low 32 bits of each 'long' are used.  */
      int endian = WORDS_BIG_ENDIAN ? 1 : 0;

      REAL_VALUE_FROM_CONST_DOUBLE (r, x);
      switch (GET_MODE_BITSIZE (GET_MODE (x)))
	{
	case 32:
	  REAL_VALUE_TO_TARGET_SINGLE (r, i[endian]);
	  i[1 - endian] = 0;
	  break;
	case 64:
	  REAL_VALUE_TO_TARGET_DOUBLE (r, i);
	  break;
	case 96:
	  REAL_VALUE_TO_TARGET_LONG_DOUBLE (r, i + endian);
	  i[3 - 3 * endian] = 0;
	  break;
	case 128:
	  REAL_VALUE_TO_TARGET_LONG_DOUBLE (r, i);
	  break;
	default:
	  abort ();
	}

      /* Now, pack the 32-bit elements of the array into a CONST_DOUBLE
	 and return it.  */
#if HOST_BITS_PER_WIDE_INT == 32
      return immed_double_const (i[endian], i[1 - endian], mode);
#else
      {
	int c;

	if (HOST_BITS_PER_WIDE_INT != 64)
	  abort ();

	for (c = 0; c < 4; c++)
	  i[c] &= ~ (0L);

	switch (GET_MODE_BITSIZE (GET_MODE (x)))
	  {
	  case 32:
	  case 64:
	    return immed_double_const (((unsigned long) i[endian]) |
				       (((HOST_WIDE_INT) i[1 - endian]) << 32),
				       0, mode);
	  case 96:
	  case 128:
	    return immed_double_const (((unsigned long) i[endian * 3]) |
				       (((HOST_WIDE_INT) i[1 + endian]) << 32),
				       ((unsigned long) i[2 - endian]) |
				       (((HOST_WIDE_INT) i[3 - endian * 3]) << 32),
				       mode);
	  default:
	    abort ();
	  }
      }
#endif
    }
#endif /* ifndef REAL_ARITHMETIC */

  /* Otherwise, we can't do this.  */
  return 0;
}
/* Return the real part (which has mode MODE) of a complex value X.
   This always comes at the low address in memory.  */

rtx
gen_realpart (mode, x)
     enum machine_mode mode;
     rtx x;
{
  if (WORDS_BIG_ENDIAN
      && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
      && REG_P (x)
      && REGNO (x) < FIRST_PSEUDO_REGISTER)
    internal_error
      ("can't access real part of complex value in hard register");
  else if (WORDS_BIG_ENDIAN)
    return gen_highpart (mode, x);
  else
    return gen_lowpart (mode, x);
}
/* Return the imaginary part (which has mode MODE) of a complex value X.
   This always comes at the high address in memory.  */

rtx
gen_imagpart (mode, x)
     enum machine_mode mode;
     rtx x;
{
  if (WORDS_BIG_ENDIAN)
    return gen_lowpart (mode, x);
  else if (! WORDS_BIG_ENDIAN
	   && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
	   && REG_P (x)
	   && REGNO (x) < FIRST_PSEUDO_REGISTER)
    internal_error
      ("can't access imaginary part of complex value in hard register");
  else
    return gen_highpart (mode, x);
}
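
/* Illustrative example (added commentary, not part of the original
   source): for a DCmode pseudo built from two DFmode parts,

     rtx c  = gen_reg_rtx (DCmode);
     rtx re = gen_realpart (DFmode, c);
     rtx im = gen_imagpart (DFmode, c);

   re is the value at the low memory address and im the one at the
   high address: when WORDS_BIG_ENDIAN is zero re is the lowpart and
   im the highpart, and the roles swap when it is nonzero.  */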
/* Return 1 iff X, assumed to be a SUBREG,
   refers to the real part of the complex value in its containing reg.
   Complex values are always stored with the real part in the first word,
   regardless of WORDS_BIG_ENDIAN.  */

int
subreg_realpart_p (x)
     rtx x;
{
  if (GET_CODE (x) != SUBREG)
    abort ();

  return ((unsigned int) SUBREG_BYTE (x)
	  < GET_MODE_UNIT_SIZE (GET_MODE (SUBREG_REG (x))));
}
/* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a value,
   return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
   least-significant part of X.
   MODE specifies how big a part of X to return;
   it usually should not be larger than a word.
   If X is a MEM whose address is a QUEUED, the value may be so also.  */

rtx
gen_lowpart (mode, x)
     enum machine_mode mode;
     rtx x;
{
  rtx result = gen_lowpart_common (mode, x);

  if (result)
    return result;
  else if (GET_CODE (x) == REG)
    {
      /* Must be a hard reg that's not valid in MODE.  */
      result = gen_lowpart_common (mode, copy_to_reg (x));
      if (result == 0)
	abort ();
      return result;
    }
  else if (GET_CODE (x) == MEM)
    {
      /* The only additional case we can do is MEM.  */
      int offset = 0;
      if (WORDS_BIG_ENDIAN)
	offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
		  - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));

      if (BYTES_BIG_ENDIAN)
	/* Adjust the address so that the address-after-the-data
	   is unchanged.  */
	offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
		   - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));

      return adjust_address (x, mode, offset);
    }
  else if (GET_CODE (x) == ADDRESSOF)
    return gen_lowpart (mode, force_reg (GET_MODE (x), x));
  else
    abort ();
}
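
/* Illustrative example (added commentary, not part of the original
   source, assuming a host with 32-bit HOST_WIDE_INT): gen_lowpart on
   a VOIDmode integer constant extracts its low-order bits,

     rtx wide = immed_double_const (0x12345678, 0x9abcdef0, DImode);
     rtx low  = gen_lowpart (SImode, wide);

   so low is (const_int 0x12345678), the low word of the CONST_DOUBLE,
   sign-extended into a HOST_WIDE_INT.  */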
/* Like `gen_lowpart', but refer to the most significant part.
   This is used to access the imaginary part of a complex number.  */

rtx
gen_highpart (mode, x)
     enum machine_mode mode;
     rtx x;
{
  unsigned int msize = GET_MODE_SIZE (mode);
  rtx result;

  /* This case loses if X is a subreg.  To catch bugs early,
     complain if an invalid MODE is used even in other cases.  */
  if (msize > UNITS_PER_WORD
      && msize != GET_MODE_UNIT_SIZE (GET_MODE (x)))
    abort ();

  result = simplify_gen_subreg (mode, x, GET_MODE (x),
				subreg_highpart_offset (mode, GET_MODE (x)));

  /* simplify_gen_subreg is not guaranteed to return a valid operand for
     the target if we have a MEM.  gen_highpart must return a valid operand,
     emitting code if necessary to do so.  */
  if (GET_CODE (result) == MEM)
    result = validize_mem (result);

  if (!result)
    abort ();
  return result;
}
/* Like gen_highpart, but accept mode of EXP operand in case EXP can
   be a VOIDmode constant.  */

rtx
gen_highpart_mode (outermode, innermode, exp)
     enum machine_mode outermode, innermode;
     rtx exp;
{
  if (GET_MODE (exp) != VOIDmode)
    {
      if (GET_MODE (exp) != innermode)
	abort ();
      return gen_highpart (outermode, exp);
    }
  return simplify_gen_subreg (outermode, exp, innermode,
			      subreg_highpart_offset (outermode, innermode));
}
/* Return offset in bytes to get OUTERMODE low part
   of the value in mode INNERMODE stored in memory in target format.  */

unsigned int
subreg_lowpart_offset (outermode, innermode)
     enum machine_mode outermode, innermode;
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  return offset;
}
/* Return offset in bytes to get OUTERMODE high part
   of the value in mode INNERMODE stored in memory in target format.  */
unsigned int
subreg_highpart_offset (outermode, innermode)
     enum machine_mode outermode, innermode;
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  if (GET_MODE_SIZE (innermode) < GET_MODE_SIZE (outermode))
    abort ();

  if (difference > 0)
    {
      if (! WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (! BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  return offset;
}
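
/* Worked example (added commentary, not part of the original source):
   for OUTERMODE = SImode (4 bytes) inside INNERMODE = DImode (8
   bytes) the difference is 4, so with UNITS_PER_WORD == 4 a target
   with both WORDS_BIG_ENDIAN and BYTES_BIG_ENDIAN set puts the
   lowpart at byte offset 4 and the highpart at 0, while a fully
   little-endian target puts the lowpart at 0 and the highpart at 4:
   the lowpart is wherever the least significant word lives in the
   target's memory format.  */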
/* Return 1 iff X, assumed to be a SUBREG,
   refers to the least significant part of its containing reg.
   If X is not a SUBREG, always return 1 (it is its own low part!).  */

int
subreg_lowpart_p (x)
     rtx x;
{
  if (GET_CODE (x) != SUBREG)
    return 1;
  else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
    return 0;

  return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
	  == SUBREG_BYTE (x));
}
/* Helper routine for all the constant cases of operand_subword.
   Some places invoke this directly.  */

rtx
constant_subword (op, offset, mode)
     rtx op;
     int offset;
     enum machine_mode mode;
{
  int size_ratio = HOST_BITS_PER_WIDE_INT / BITS_PER_WORD;
  HOST_WIDE_INT val;

  /* If OP is already an integer word, return it.  */
  if (GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_SIZE (mode) == UNITS_PER_WORD)
    return op;

#ifdef REAL_ARITHMETIC
  /* The output is some bits, the width of the target machine's word.
     A wider-word host can surely hold them in a CONST_INT.  A narrower-word
     host can't.  */
  if (HOST_BITS_PER_WIDE_INT >= BITS_PER_WORD
      && GET_MODE_CLASS (mode) == MODE_FLOAT
      && GET_MODE_BITSIZE (mode) == 64
      && GET_CODE (op) == CONST_DOUBLE)
    {
      long k[2];
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_DOUBLE (rv, k);

      /* We handle 32-bit and >= 64-bit words here.  Note that the order in
	 which the words are written depends on the word endianness.
	 ??? This is a potential portability problem and should
	 be fixed at some point.

	 We must exercise caution with the sign bit.  By definition there
	 are 32 significant bits in K; there may be more in a HOST_WIDE_INT.
	 Consider a host with a 32-bit long and a 64-bit HOST_WIDE_INT.
	 So we explicitly mask and sign-extend as necessary.  */
      if (BITS_PER_WORD == 32)
	{
	  val = k[offset];
	  val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;
	  return GEN_INT (val);
	}
#if HOST_BITS_PER_WIDE_INT >= 64
      else if (BITS_PER_WORD >= 64 && offset == 0)
	{
	  val = k[! WORDS_BIG_ENDIAN];
	  val = (((val & 0xffffffff) ^ 0x80000000) - 0x80000000) << 32;
	  val |= (HOST_WIDE_INT) k[WORDS_BIG_ENDIAN] & 0xffffffff;
	  return GEN_INT (val);
	}
#endif
      else if (BITS_PER_WORD == 16)
	{
	  val = k[offset >> 1];
	  if ((offset & 1) == ! WORDS_BIG_ENDIAN)
	    val >>= 16;
	  val = ((val & 0xffff) ^ 0x8000) - 0x8000;
	  return GEN_INT (val);
	}
      else
	abort ();
    }
  else if (HOST_BITS_PER_WIDE_INT >= BITS_PER_WORD
	   && GET_MODE_CLASS (mode) == MODE_FLOAT
	   && GET_MODE_BITSIZE (mode) > 64
	   && GET_CODE (op) == CONST_DOUBLE)
    {
      long k[4];
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);

      if (BITS_PER_WORD == 32)
	{
	  val = k[offset];
	  val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;
	  return GEN_INT (val);
	}
#if HOST_BITS_PER_WIDE_INT >= 64
      else if (BITS_PER_WORD >= 64 && offset <= 1)
	{
	  val = k[offset * 2 + ! WORDS_BIG_ENDIAN];
	  val = (((val & 0xffffffff) ^ 0x80000000) - 0x80000000) << 32;
	  val |= (HOST_WIDE_INT) k[offset * 2 + WORDS_BIG_ENDIAN] & 0xffffffff;
	  return GEN_INT (val);
	}
#endif
      else
	abort ();
    }
#else /* no REAL_ARITHMETIC */
  if (((HOST_FLOAT_FORMAT == TARGET_FLOAT_FORMAT
	&& HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
       || flag_pretend_float)
      && GET_MODE_CLASS (mode) == MODE_FLOAT
      && GET_MODE_SIZE (mode) == 2 * UNITS_PER_WORD
      && GET_CODE (op) == CONST_DOUBLE)
    {
      /* The constant is stored in the host's word-ordering,
	 but we want to access it in the target's word-ordering.  Some
	 compilers don't like a conditional inside macro args, so we have two
	 copies of the return.  */
#ifdef HOST_WORDS_BIG_ENDIAN
      return GEN_INT (offset == WORDS_BIG_ENDIAN
		      ? CONST_DOUBLE_HIGH (op) : CONST_DOUBLE_LOW (op));
#else
      return GEN_INT (offset != WORDS_BIG_ENDIAN
		      ? CONST_DOUBLE_HIGH (op) : CONST_DOUBLE_LOW (op));
#endif
    }
#endif /* no REAL_ARITHMETIC */
  /* Single word float is a little harder, since single- and double-word
     values often do not have the same high-order bits.  We have already
     verified that we want the only defined word of the single-word value.  */
#ifdef REAL_ARITHMETIC
  if (GET_MODE_CLASS (mode) == MODE_FLOAT
      && GET_MODE_BITSIZE (mode) == 32
      && GET_CODE (op) == CONST_DOUBLE)
    {
      long l;
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_SINGLE (rv, l);

      /* Sign extend from known 32-bit value to HOST_WIDE_INT.  */
      val = l;
      val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;

      if (BITS_PER_WORD == 16)
	{
	  if ((offset & 1) == ! WORDS_BIG_ENDIAN)
	    val >>= 16;
	  val = ((val & 0xffff) ^ 0x8000) - 0x8000;
	}

      return GEN_INT (val);
    }
#else
  if (((HOST_FLOAT_FORMAT == TARGET_FLOAT_FORMAT
	&& HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
       || flag_pretend_float)
      && sizeof (float) * 8 == HOST_BITS_PER_WIDE_INT
      && GET_MODE_CLASS (mode) == MODE_FLOAT
      && GET_MODE_SIZE (mode) == UNITS_PER_WORD
      && GET_CODE (op) == CONST_DOUBLE)
    {
      double d;
      union {float f; HOST_WIDE_INT i; } u;

      REAL_VALUE_FROM_CONST_DOUBLE (d, op);

      u.f = d;
      return GEN_INT (u.i);
    }
  if (((HOST_FLOAT_FORMAT == TARGET_FLOAT_FORMAT
	&& HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
       || flag_pretend_float)
      && sizeof (double) * 8 == HOST_BITS_PER_WIDE_INT
      && GET_MODE_CLASS (mode) == MODE_FLOAT
      && GET_MODE_SIZE (mode) == UNITS_PER_WORD
      && GET_CODE (op) == CONST_DOUBLE)
    {
      double d;
      union {double d; HOST_WIDE_INT i; } u;

      REAL_VALUE_FROM_CONST_DOUBLE (d, op);

      u.d = d;
      return GEN_INT (u.i);
    }
#endif /* no REAL_ARITHMETIC */
  /* The only remaining cases that we can handle are integers.
     Convert to proper endianness now since these cases need it.
     At this point, offset == 0 means the low-order word.

     We do not want to handle the case when BITS_PER_WORD <= HOST_BITS_PER_INT
     in general.  However, if OP is (const_int 0), we can just return
     it for any word.  */

  if (op == const0_rtx)
    return op;

  if (GET_MODE_CLASS (mode) != MODE_INT
      || (GET_CODE (op) != CONST_INT && GET_CODE (op) != CONST_DOUBLE)
      || BITS_PER_WORD > HOST_BITS_PER_WIDE_INT)
    return 0;

  if (WORDS_BIG_ENDIAN)
    offset = GET_MODE_SIZE (mode) / UNITS_PER_WORD - 1 - offset;

  /* Find out which word on the host machine this value is in and get
     it from the constant.  */
  val = (offset / size_ratio == 0
	 ? (GET_CODE (op) == CONST_INT ? INTVAL (op) : CONST_DOUBLE_LOW (op))
	 : (GET_CODE (op) == CONST_INT
	    ? (INTVAL (op) < 0 ? ~0 : 0) : CONST_DOUBLE_HIGH (op)));

  /* Get the value we want into the low bits of val.  */
  if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT)
    val = ((val >> ((offset % size_ratio) * BITS_PER_WORD)));

  val = trunc_int_for_mode (val, word_mode);

  return GEN_INT (val);
}
/* Return subword OFFSET of operand OP.
   The word number, OFFSET, is interpreted as the word number starting
   at the low-order address.  OFFSET 0 is the low-order word if not
   WORDS_BIG_ENDIAN, otherwise it is the high-order word.

   If we cannot extract the required word, we return zero.  Otherwise,
   an rtx corresponding to the requested word will be returned.

   VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
   reload has completed, a valid address will always be returned.  After
   reload, if a valid address cannot be returned, we return zero.

   If VALIDATE_ADDRESS is zero, we simply form the required address; validating
   it is the responsibility of the caller.

   MODE is the mode of OP in case it is a CONST_INT.

   ??? This is still rather broken for some cases.  The problem for the
   moment is that all callers of this thing provide no 'goal mode' to
   tell us to work with.  This exists because all callers were written
   in a word based SUBREG world.
   Now use of this function can be deprecated by simplify_subreg in most
   cases.
 */

rtx
operand_subword (op, offset, validate_address, mode)
     rtx op;
     unsigned int offset;
     int validate_address;
     enum machine_mode mode;
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  if (mode == VOIDmode)
    abort ();

  /* If OP is narrower than a word, fail.  */
  if (mode != BLKmode
      && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
    return 0;

  /* If we want a word outside OP, return zero.  */
  if (mode != BLKmode
      && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
    return const0_rtx;

  /* Form a new MEM at the requested address.  */
  if (GET_CODE (op) == MEM)
    {
      rtx new = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);

      if (! validate_address)
	return new;

      else if (reload_completed)
	{
	  if (! strict_memory_address_p (word_mode, XEXP (new, 0)))
	    return 0;
	}
      else
	return replace_equiv_address (new, XEXP (new, 0));
    }

  /* Rest can be handled by simplify_subreg.  */
  return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
}
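
/* Illustrative example (added commentary, not part of the original
   source): splitting a DImode operand into word-sized halves on a
   32-bit target,

     rtx w0 = operand_subword (op, 0, 1, DImode);
     rtx w1 = operand_subword (op, 1, 1, DImode);

   Word 0 is the one at the low address, so it is the low-order half
   exactly when WORDS_BIG_ENDIAN is zero; a zero return means the
   word could not be extracted (e.g. an invalid address after
   reload).  */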
/* Similar to `operand_subword', but never return 0.  If we can't extract
   the required subword, put OP into a register and try again.  If that fails,
   abort.  We always validate the address in this case.

   MODE is the mode of OP, in case it is CONST_INT.  */

rtx
operand_subword_force (op, offset, mode)
     rtx op;
     unsigned int offset;
     enum machine_mode mode;
{
  rtx result = operand_subword (op, offset, 1, mode);

  if (result)
    return result;

  if (mode != BLKmode && mode != VOIDmode)
    {
      /* If this is a register which can not be accessed by words, copy it
	 to a pseudo register.  */
      if (GET_CODE (op) == REG)
	op = copy_to_reg (op);
      else
	op = force_reg (mode, op);
    }

  result = operand_subword (op, offset, 1, mode);
  if (result == 0)
    abort ();

  return result;
}
/* Given a compare instruction, swap the operands.
   A test instruction is changed into a compare of 0 against the operand.  */

void
reverse_comparison (insn)
     rtx insn;
{
  rtx body = PATTERN (insn);
  rtx comp;

  if (GET_CODE (body) == SET)
    comp = SET_SRC (body);
  else
    comp = SET_SRC (XVECEXP (body, 0, 0));

  if (GET_CODE (comp) == COMPARE)
    {
      rtx op0 = XEXP (comp, 0);
      rtx op1 = XEXP (comp, 1);
      XEXP (comp, 0) = op1;
      XEXP (comp, 1) = op0;
    }
  else
    {
      rtx new = gen_rtx_COMPARE (VOIDmode,
				 CONST0_RTX (GET_MODE (comp)), comp);
      if (GET_CODE (body) == SET)
	SET_SRC (body) = new;
      else
	SET_SRC (XVECEXP (body, 0, 0)) = new;
    }
}
/* Given REF, a MEM, and T, either the type of X or the expression
   corresponding to REF, set the memory attributes.  OBJECTP is nonzero
   if we are making a new object of this type.  */

void
set_mem_attributes (ref, t, objectp)
     rtx ref;
     tree t;
     int objectp;
{
  HOST_WIDE_INT alias = MEM_ALIAS_SET (ref);
  tree decl = MEM_DECL (ref);
  rtx offset = MEM_OFFSET (ref);
  rtx size = MEM_SIZE (ref);
  unsigned int align = MEM_ALIGN (ref);
  tree type;

  /* It can happen that type_for_mode was given a mode for which there
     is no language-level type.  In which case it returns NULL, which
     we can see here.  */
  if (t == NULL_TREE)
    return;

  type = TYPE_P (t) ? t : TREE_TYPE (t);

  /* If we have already set DECL_RTL = ref, get_alias_set will get the
     wrong answer, as it assumes that DECL_RTL already has the right alias
     info.  Callers should not set DECL_RTL until after the call to
     set_mem_attributes.  */
  if (DECL_P (t) && ref == DECL_RTL_IF_SET (t))
    abort ();

  /* Get the alias set from the expression or type (perhaps using a
     front-end routine) and use it.  */
  alias = get_alias_set (t);

  MEM_VOLATILE_P (ref) = TYPE_VOLATILE (type);
  MEM_IN_STRUCT_P (ref) = AGGREGATE_TYPE_P (type);
  RTX_UNCHANGING_P (ref)
    |= ((lang_hooks.honor_readonly
	 && (TYPE_READONLY (type) || TREE_READONLY (t)))
	|| (! TYPE_P (t) && TREE_CONSTANT (t)));

  /* If we are making an object of this type, or if this is a DECL, we know
     that it is a scalar if the type is not an aggregate.  */
  if ((objectp || DECL_P (t)) && ! AGGREGATE_TYPE_P (type))
    MEM_SCALAR_P (ref) = 1;

  /* We can set the alignment from the type if we are making an object,
     this is an INDIRECT_REF, or if TYPE_ALIGN_OK.  */
  if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
    align = MAX (align, TYPE_ALIGN (type));

  /* If the size is known, we can set that.  */
  if (TYPE_SIZE_UNIT (type) && host_integerp (TYPE_SIZE_UNIT (type), 1))
    size = GEN_INT (tree_low_cst (TYPE_SIZE_UNIT (type), 1));

  /* If T is not a type, we may be able to deduce some more information about
     the expression.  */
  if (! TYPE_P (t))
    {
      maybe_set_unchanging (ref, t);
      if (TREE_THIS_VOLATILE (t))
	MEM_VOLATILE_P (ref) = 1;

      /* Now remove any NOPs: they don't change what the underlying object is.
	 Likewise for SAVE_EXPR.  */
      while (TREE_CODE (t) == NOP_EXPR || TREE_CODE (t) == CONVERT_EXPR
	     || TREE_CODE (t) == NON_LVALUE_EXPR || TREE_CODE (t) == SAVE_EXPR)
	t = TREE_OPERAND (t, 0);

      /* If this expression can't be addressed (e.g., it contains a reference
	 to a non-addressable field), show we don't change its alias set.  */
      if (! can_address_p (t))
	MEM_KEEP_ALIAS_SET_P (ref) = 1;

      /* If this is a decl, set the attributes of the MEM from it.  */
      if (DECL_P (t))
	{
	  decl = t;
	  offset = GEN_INT (0);
	  size = (DECL_SIZE_UNIT (t)
		  && host_integerp (DECL_SIZE_UNIT (t), 1)
		  ? GEN_INT (tree_low_cst (DECL_SIZE_UNIT (t), 1)) : 0);
	  align = DECL_ALIGN (t);
	}

      /* If this is a constant, we know the alignment.  */
      else if (TREE_CODE_CLASS (TREE_CODE (t)) == 'c')
	{
	  align = TYPE_ALIGN (type);
#ifdef CONSTANT_ALIGNMENT
	  align = CONSTANT_ALIGNMENT (t, align);
#endif
	}
    }

  /* Now set the attributes we computed above.  */
  MEM_ATTRS (ref)
    = get_mem_attrs (alias, decl, offset, size, align, GET_MODE (ref));

  /* If this is already known to be a scalar or aggregate, we are done.  */
  if (MEM_IN_STRUCT_P (ref) || MEM_SCALAR_P (ref))
    return;

  /* If it is a reference into an aggregate, this is part of an aggregate.
     Otherwise we don't know.  */
  else if (TREE_CODE (t) == COMPONENT_REF || TREE_CODE (t) == ARRAY_REF
	   || TREE_CODE (t) == ARRAY_RANGE_REF
	   || TREE_CODE (t) == BIT_FIELD_REF)
    MEM_IN_STRUCT_P (ref) = 1;
}
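
/* Illustrative sketch (added commentary, not part of the original
   source) of the intended calling convention; `decl' and `addr'
   stand for some VAR_DECL and a valid address rtx from the caller:

     rtx mem = gen_rtx_MEM (DECL_MODE (decl), addr);
     set_mem_attributes (mem, decl, 1);
     SET_DECL_RTL (decl, mem);

   The MEM's attributes must be set before DECL_RTL is installed,
   since the check above aborts if DECL_RTL already points at REF.  */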
/* Set the alias set of MEM to SET.  */

void
set_mem_alias_set (mem, set)
     rtx mem;
     HOST_WIDE_INT set;
{
#ifdef ENABLE_CHECKING
  /* If the new and old alias sets don't conflict, something is wrong.  */
  if (!alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)))
    abort ();
#endif

  MEM_ATTRS (mem) = get_mem_attrs (set, MEM_DECL (mem), MEM_OFFSET (mem),
				   MEM_SIZE (mem), MEM_ALIGN (mem),
				   GET_MODE (mem));
}
/* Set the alignment of MEM to ALIGN bits.  */

void
set_mem_align (mem, align)
     rtx mem;
     unsigned int align;
{
  MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_DECL (mem),
				   MEM_OFFSET (mem), MEM_SIZE (mem), align,
				   GET_MODE (mem));
}
/* Set the decl for MEM to DECL.  */

void
set_mem_decl (mem, decl)
     rtx mem;
     tree decl;
{
  MEM_ATTRS (mem)
    = get_mem_attrs (MEM_ALIAS_SET (mem), decl, MEM_OFFSET (mem),
		     MEM_SIZE (mem), MEM_ALIGN (mem), GET_MODE (mem));
}
/* Return a memory reference like MEMREF, but with its mode changed to MODE
   and its address changed to ADDR.  (VOIDmode means don't change the mode.
   NULL for ADDR means don't change the address.)  VALIDATE is nonzero if the
   returned memory location is required to be valid.  The memory
   attributes are not changed.  */

static rtx
change_address_1 (memref, mode, addr, validate)
     rtx memref;
     enum machine_mode mode;
     rtx addr;
     int validate;
{
  rtx new;

  if (GET_CODE (memref) != MEM)
    abort ();
  if (mode == VOIDmode)
    mode = GET_MODE (memref);
  if (addr == 0)
    addr = XEXP (memref, 0);

  if (validate)
    {
      if (reload_in_progress || reload_completed)
	{
	  if (! memory_address_p (mode, addr))
	    abort ();
	}
      else
	addr = memory_address (mode, addr);
    }

  if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
    return memref;

  new = gen_rtx_MEM (mode, addr);
  MEM_COPY_ATTRIBUTES (new, memref);
  return new;
}
/* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
   way we are changing MEMREF, so we only preserve the alias set.  */

rtx
change_address (memref, mode, addr)
     rtx memref;
     enum machine_mode mode;
     rtx addr;
{
  rtx new = change_address_1 (memref, mode, addr, 1);
  enum machine_mode mmode = GET_MODE (new);

  MEM_ATTRS (new)
    = get_mem_attrs (MEM_ALIAS_SET (memref), 0, 0,
		     mmode == BLKmode ? 0 : GEN_INT (GET_MODE_SIZE (mmode)),
		     (mmode == BLKmode ? BITS_PER_UNIT
		      : GET_MODE_ALIGNMENT (mmode)),
		     mmode);

  return new;
}
1872 /* Return a memory reference like MEMREF, but with its mode changed
1873 to MODE and its address offset by OFFSET bytes. If VALIDATE is
1874 nonzero, the memory address is forced to be valid.
1875 If ADJUST is zero, OFFSET is only used to update MEM_ATTRS
1876 and caller is responsible for adjusting MEMREF base register. */
1879 adjust_address_1 (memref, mode, offset, validate, adjust)
1880 rtx memref;
1881 enum machine_mode mode;
1882 HOST_WIDE_INT offset;
1883 int validate, adjust;
1885 rtx addr = XEXP (memref, 0);
1886 rtx new;
1887 rtx memoffset = MEM_OFFSET (memref);
1888 rtx size = 0;
1889 unsigned int memalign = MEM_ALIGN (memref);
1891 if (adjust == 0 || offset == 0)
1892 /* ??? Prefer to create garbage instead of creating shared rtl. */
1893 addr = copy_rtx (addr);
1894 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
1895 object, we can merge it into the LO_SUM. */
1896 else if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
1897 && offset >= 0
1898 && (unsigned HOST_WIDE_INT) offset
1899 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
1900 addr = gen_rtx_LO_SUM (Pmode, XEXP (addr, 0),
1901 plus_constant (XEXP (addr, 1), offset));
1902 else
1903 addr = plus_constant (addr, offset);
1905 new = change_address_1 (memref, mode, addr, validate);
1907 /* Compute the new values of the memory attributes due to this adjustment.
1908 We add the offsets and update the alignment. */
1909 if (memoffset)
1910 memoffset = GEN_INT (offset + INTVAL (memoffset));
1912 /* Compute the new alignment by taking the MIN of the alignment and the
1913 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
1914 is zero. */
1915 if (offset != 0)
1916 memalign = MIN (memalign, (offset & -offset) * BITS_PER_UNIT);
1918 /* We can compute the size in a number of ways. */
1919 if (GET_MODE (new) != BLKmode)
1920 size = GEN_INT (GET_MODE_SIZE (GET_MODE (new)));
1921 else if (MEM_SIZE (memref))
1922 size = plus_constant (MEM_SIZE (memref), -offset);
1924 MEM_ATTRS (new) = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_DECL (memref),
1925 memoffset, size, memalign, GET_MODE (new));
1927 /* At some point, we should validate that this offset is within the object,
1928 if all the appropriate values are known. */
1929 return new;
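/* Usage sketch (illustrative, not original code; it assumes the
   adjust_address macro from expr.h, which wraps this function with
   VALIDATE and ADJUST both 1).  Extracting the high word of a
   little-endian DImode MEM:  */
#if 0
static rtx
example_high_word (di_mem)
     rtx di_mem;	/* a DImode MEM */
{
  /* MEM_OFFSET grows by 4 and MEM_ALIGN drops to at most 32 bits,
     since (4 & -4) * BITS_PER_UNIT == 32, per the code above.  */
  return adjust_address (di_mem, SImode, 4);
}
#endif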
1932 /* Return a memory reference like MEMREF, but with its mode changed
1933 to MODE and its address changed to ADDR, which is assumed to be
1934 MEMREF offset by OFFSET bytes. If VALIDATE is
1935 nonzero, the memory address is forced to be valid. */
1938 adjust_automodify_address_1 (memref, mode, addr, offset, validate)
1939 rtx memref;
1940 enum machine_mode mode;
1941 rtx addr;
1942 HOST_WIDE_INT offset;
1943 int validate;
1945 memref = change_address_1 (memref, VOIDmode, addr, validate);
1946 return adjust_address_1 (memref, mode, offset, validate, 0);
1949 /* Return a memory reference like MEMREF, but whose address is changed by
1950 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
1951 known to be in OFFSET (possibly 1). */
1954 offset_address (memref, offset, pow2)
1955 rtx memref;
1956 rtx offset;
1957 HOST_WIDE_INT pow2;
1959 rtx new = change_address_1 (memref, VOIDmode,
1960 gen_rtx_PLUS (Pmode, XEXP (memref, 0),
1961 force_reg (Pmode, offset)), 1);
1963 /* Update the alignment to reflect the offset. Reset the offset, which
1964 we don't know. */
1965 MEM_ATTRS (new) = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_DECL (memref),
1966 0, 0, MIN (MEM_ALIGN (memref),
1967 pow2 * BITS_PER_UNIT),
1968 GET_MODE (new));
1969 return new;
1972 /* Return a memory reference like MEMREF, but with its address changed to
1973 ADDR. The caller is asserting that the actual piece of memory pointed
1974 to is the same, just the form of the address is being changed, such as
1975 by putting something into a register. */
1978 replace_equiv_address (memref, addr)
1979 rtx memref;
1980 rtx addr;
1982 /* change_address_1 copies the memory attribute structure without change
1983 and that's exactly what we want here. */
1984 update_temp_slot_address (XEXP (memref, 0), addr);
1985 return change_address_1 (memref, VOIDmode, addr, 1);
1988 /* Likewise, but the reference is not required to be valid. */
1991 replace_equiv_address_nv (memref, addr)
1992 rtx memref;
1993 rtx addr;
1995 return change_address_1 (memref, VOIDmode, addr, 0);
1998 /* Return a newly created CODE_LABEL rtx with a unique label number. */
2001 gen_label_rtx ()
2003 rtx label;
2005 label = gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX,
2006 NULL_RTX, label_num++, NULL, NULL);
2008 LABEL_NUSES (label) = 0;
2009 LABEL_ALTERNATE_NAME (label) = NULL;
2010 return label;
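/* Sketch of typical label usage (illustrative, not original code; gen_jump
   comes from the machine-generated insn-emit.c).  */
#if 0
static void
example_emit_skip ()
{
  rtx label = gen_label_rtx ();		/* fresh CODE_LABEL, unique number */

  emit_jump_insn (gen_jump (label));	/* branch to it */
  emit_barrier ();
  /* ... emit code that is only reached via the label ... */
  emit_label (label);			/* INSN_UID is assigned on emission */
}
#endif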
2013 /* For procedure integration. */
2015 /* Install new pointers to the first and last insns in the chain.
2016 Also, set cur_insn_uid to one higher than the last in use.
2017 Used for an inline-procedure after copying the insn chain. */
2019 void
2020 set_new_first_and_last_insn (first, last)
2021 rtx first, last;
2023 rtx insn;
2025 first_insn = first;
2026 last_insn = last;
2027 cur_insn_uid = 0;
2029 for (insn = first; insn; insn = NEXT_INSN (insn))
2030 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2032 cur_insn_uid++;
2035 /* Set the range of label numbers found in the current function.
2036 This is used when belatedly compiling an inline function. */
2038 void
2039 set_new_first_and_last_label_num (first, last)
2040 int first, last;
2042 base_label_num = label_num;
2043 first_label_num = first;
2044 last_label_num = last;
2047 /* Set the last label number found in the current function.
2048 This is used when belatedly compiling an inline function. */
2050 void
2051 set_new_last_label_num (last)
2052 int last;
2054 base_label_num = label_num;
2055 last_label_num = last;
2058 /* Restore all variables describing the current status from the structure *P.
2059 This is used after a nested function. */
2061 void
2062 restore_emit_status (p)
2063 struct function *p ATTRIBUTE_UNUSED;
2065 last_label_num = 0;
2066 clear_emit_caches ();
2069 /* Clear out all parts of the state in F that can safely be discarded
2070 after the function has been compiled, to let garbage collection
2071 reclaim the memory. */
2073 void
2074 free_emit_status (f)
2075 struct function *f;
2077 free (f->emit->x_regno_reg_rtx);
2078 free (f->emit->regno_pointer_align);
2079 free (f->emit->regno_decl);
2080 free (f->emit);
2081 f->emit = NULL;
2084 /* Go through all the RTL insn bodies and copy any invalid shared
2085 structure. This routine should only be called once. */
2087 void
2088 unshare_all_rtl (fndecl, insn)
2089 tree fndecl;
2090 rtx insn;
2092 tree decl;
2094 /* Make sure that virtual parameters are not shared. */
2095 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
2096 SET_DECL_RTL (decl, copy_rtx_if_shared (DECL_RTL (decl)));
2098 /* Make sure that virtual stack slots are not shared. */
2099 unshare_all_decls (DECL_INITIAL (fndecl));
2101 /* Unshare just about everything else. */
2102 unshare_all_rtl_1 (insn);
2104 /* Make sure the addresses of stack slots found outside the insn chain
2105 (such as, in DECL_RTL of a variable) are not shared
2106 with the insn chain.
2108 This special care is necessary when the stack slot MEM does not
2109 actually appear in the insn chain. If it does appear, its address
2110 is unshared from all else at that point. */
2111 stack_slot_list = copy_rtx_if_shared (stack_slot_list);
2114 /* Go through all the RTL insn bodies and copy any invalid shared
2115 structure, again. This is a fairly expensive thing to do so it
2116 should be done sparingly. */
2118 void
2119 unshare_all_rtl_again (insn)
2120 rtx insn;
2122 rtx p;
2123 tree decl;
2125 for (p = insn; p; p = NEXT_INSN (p))
2126 if (INSN_P (p))
2128 reset_used_flags (PATTERN (p));
2129 reset_used_flags (REG_NOTES (p));
2130 reset_used_flags (LOG_LINKS (p));
2133 /* Make sure that virtual stack slots are not shared. */
2134 reset_used_decls (DECL_INITIAL (cfun->decl));
2136 /* Make sure that virtual parameters are not shared. */
2137 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = TREE_CHAIN (decl))
2138 reset_used_flags (DECL_RTL (decl));
2140 reset_used_flags (stack_slot_list);
2142 unshare_all_rtl (cfun->decl, insn);
2145 /* Go through all the RTL insn bodies and copy any invalid shared structure.
2146 Assumes the mark bits are cleared at entry. */
2148 static void
2149 unshare_all_rtl_1 (insn)
2150 rtx insn;
2152 for (; insn; insn = NEXT_INSN (insn))
2153 if (INSN_P (insn))
2155 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2156 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2157 LOG_LINKS (insn) = copy_rtx_if_shared (LOG_LINKS (insn));
2161 /* Go through all virtual stack slots of a function and copy any
2162 shared structure. */
2163 static void
2164 unshare_all_decls (blk)
2165 tree blk;
2167 tree t;
2169 /* Copy shared decls. */
2170 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
2171 if (DECL_RTL_SET_P (t))
2172 SET_DECL_RTL (t, copy_rtx_if_shared (DECL_RTL (t)));
2174 /* Now process sub-blocks. */
2175 for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
2176 unshare_all_decls (t);
2179 /* Go through all virtual stack slots of a function and mark them as
2180 not shared. */
2181 static void
2182 reset_used_decls (blk)
2183 tree blk;
2185 tree t;
2187 /* Mark decls. */
2188 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
2189 if (DECL_RTL_SET_P (t))
2190 reset_used_flags (DECL_RTL (t));
2192 /* Now process sub-blocks. */
2193 for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
2194 reset_used_decls (t);
2197 /* Mark ORIG as in use, and return a copy of it if it was already in use.
2198 Recursively does the same for subexpressions. */
2201 copy_rtx_if_shared (orig)
2202 rtx orig;
2204 rtx x = orig;
2205 int i;
2206 enum rtx_code code;
2207 const char *format_ptr;
2208 int copied = 0;
2210 if (x == 0)
2211 return 0;
2213 code = GET_CODE (x);
2215 /* These types may be freely shared. */
2217 switch (code)
2219 case REG:
2220 case QUEUED:
2221 case CONST_INT:
2222 case CONST_DOUBLE:
2223 case SYMBOL_REF:
2224 case CODE_LABEL:
2225 case PC:
2226 case CC0:
2227 case SCRATCH:
2228 /* SCRATCHes must be shared because they represent distinct values. */
2229 return x;
2231 case CONST:
2232 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
2233 a LABEL_REF, it isn't sharable. */
2234 if (GET_CODE (XEXP (x, 0)) == PLUS
2235 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
2236 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
2237 return x;
2238 break;
2240 case INSN:
2241 case JUMP_INSN:
2242 case CALL_INSN:
2243 case NOTE:
2244 case BARRIER:
2245 /* The chain of insns is not being copied. */
2246 return x;
2248 case MEM:
2249 /* A MEM is allowed to be shared if its address is constant.
2251 We used to allow sharing of MEMs which referenced
2252 virtual_stack_vars_rtx or virtual_incoming_args_rtx, but
2253 that can lose. instantiate_virtual_regs will not unshare
2254 the MEMs, and combine may change the structure of the address
2255 because it looks safe and profitable in one context, but
2256 in some other context it creates unrecognizable RTL. */
2257 if (CONSTANT_ADDRESS_P (XEXP (x, 0)))
2258 return x;
2260 break;
2262 default:
2263 break;
2266 /* This rtx may not be shared. If it has already been seen,
2267 replace it with a copy of itself. */
2269 if (x->used)
2271 rtx copy;
2273 copy = rtx_alloc (code);
2274 memcpy (copy, x,
2275 (sizeof (*copy) - sizeof (copy->fld)
2276 + sizeof (copy->fld[0]) * GET_RTX_LENGTH (code)));
2277 x = copy;
2278 copied = 1;
2280 x->used = 1;
2282 /* Now scan the subexpressions recursively.
2283 We can store any replaced subexpressions directly into X
2284 since we know X is not shared! Any vectors in X
2285 must be copied if X was copied. */
2287 format_ptr = GET_RTX_FORMAT (code);
2289 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2291 switch (*format_ptr++)
2293 case 'e':
2294 XEXP (x, i) = copy_rtx_if_shared (XEXP (x, i));
2295 break;
2297 case 'E':
2298 if (XVEC (x, i) != NULL)
2300 int j;
2301 int len = XVECLEN (x, i);
2303 if (copied && len > 0)
2304 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
2305 for (j = 0; j < len; j++)
2306 XVECEXP (x, i, j) = copy_rtx_if_shared (XVECEXP (x, i, j));
2308 break;
2311 return x;
2314 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
2315 to look for shared sub-parts. */
2317 void
2318 reset_used_flags (x)
2319 rtx x;
2321 int i, j;
2322 enum rtx_code code;
2323 const char *format_ptr;
2325 if (x == 0)
2326 return;
2328 code = GET_CODE (x);
2330 /* These types may be freely shared so we needn't do any resetting
2331 for them. */
2333 switch (code)
2335 case REG:
2336 case QUEUED:
2337 case CONST_INT:
2338 case CONST_DOUBLE:
2339 case SYMBOL_REF:
2340 case CODE_LABEL:
2341 case PC:
2342 case CC0:
2343 return;
2345 case INSN:
2346 case JUMP_INSN:
2347 case CALL_INSN:
2348 case NOTE:
2349 case LABEL_REF:
2350 case BARRIER:
2351 /* The chain of insns is not being copied. */
2352 return;
2354 default:
2355 break;
2358 x->used = 0;
2360 format_ptr = GET_RTX_FORMAT (code);
2361 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2363 switch (*format_ptr++)
2365 case 'e':
2366 reset_used_flags (XEXP (x, i));
2367 break;
2369 case 'E':
2370 for (j = 0; j < XVECLEN (x, i); j++)
2371 reset_used_flags (XVECEXP (x, i, j));
2372 break;
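/* The two functions above form a protocol; an illustrative sketch (not
   original code): clear the mark bits first, then let copy_rtx_if_shared
   mark each node on first sight and copy it on second.  */
#if 0
static rtx
example_unshare (x)
     rtx x;
{
  reset_used_flags (x);
  return copy_rtx_if_shared (x);
}
#endif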
2377 /* Copy X if necessary so that it won't be altered by changes in OTHER.
2378 Return X or the rtx for the pseudo reg the value of X was copied into.
2379 OTHER must be valid as a SET_DEST. */
2382 make_safe_from (x, other)
2383 rtx x, other;
2385 while (1)
2386 switch (GET_CODE (other))
2388 case SUBREG:
2389 other = SUBREG_REG (other);
2390 break;
2391 case STRICT_LOW_PART:
2392 case SIGN_EXTEND:
2393 case ZERO_EXTEND:
2394 other = XEXP (other, 0);
2395 break;
2396 default:
2397 goto done;
2399 done:
2400 if ((GET_CODE (other) == MEM
2401 && ! CONSTANT_P (x)
2402 && GET_CODE (x) != REG
2403 && GET_CODE (x) != SUBREG)
2404 || (GET_CODE (other) == REG
2405 && (REGNO (other) < FIRST_PSEUDO_REGISTER
2406 || reg_mentioned_p (other, x))))
2408 rtx temp = gen_reg_rtx (GET_MODE (x));
2409 emit_move_insn (temp, x);
2410 return temp;
2412 return x;
2415 /* Emission of insns (adding them to the doubly-linked list). */
2417 /* Return the first insn of the current sequence or current function. */
2420 get_insns ()
2422 return first_insn;
2425 /* Return the last insn emitted in current sequence or current function. */
2428 get_last_insn ()
2430 return last_insn;
2433 /* Specify a new insn as the last in the chain. */
2435 void
2436 set_last_insn (insn)
2437 rtx insn;
2439 if (NEXT_INSN (insn) != 0)
2440 abort ();
2441 last_insn = insn;
2444 /* Return the last insn emitted, even if it is in a sequence now pushed. */
2447 get_last_insn_anywhere ()
2449 struct sequence_stack *stack;
2450 if (last_insn)
2451 return last_insn;
2452 for (stack = seq_stack; stack; stack = stack->next)
2453 if (stack->last != 0)
2454 return stack->last;
2455 return 0;
2458 /* Return a number larger than any instruction's uid in this function. */
2461 get_max_uid ()
2463 return cur_insn_uid;
2466 /* Renumber instructions so that no instruction UIDs are wasted. */
2468 void
2469 renumber_insns (stream)
2470 FILE *stream;
2472 rtx insn;
2474 /* If we're not supposed to renumber instructions, don't. */
2475 if (!flag_renumber_insns)
2476 return;
2478 /* If there aren't that many instructions, then it's not really
2479 worth renumbering them. */
2480 if (flag_renumber_insns == 1 && get_max_uid () < 25000)
2481 return;
2483 cur_insn_uid = 1;
2485 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2487 if (stream)
2488 fprintf (stream, "Renumbering insn %d to %d\n",
2489 INSN_UID (insn), cur_insn_uid);
2490 INSN_UID (insn) = cur_insn_uid++;
2494 /* Return the next insn. If it is a SEQUENCE, return the first insn
2495 of the sequence. */
2498 next_insn (insn)
2499 rtx insn;
2501 if (insn)
2503 insn = NEXT_INSN (insn);
2504 if (insn && GET_CODE (insn) == INSN
2505 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2506 insn = XVECEXP (PATTERN (insn), 0, 0);
2509 return insn;
2512 /* Return the previous insn. If it is a SEQUENCE, return the last insn
2513 of the sequence. */
2516 previous_insn (insn)
2517 rtx insn;
2519 if (insn)
2521 insn = PREV_INSN (insn);
2522 if (insn && GET_CODE (insn) == INSN
2523 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2524 insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
2527 return insn;
2530 /* Return the next insn after INSN that is not a NOTE. This routine does not
2531 look inside SEQUENCEs. */
2534 next_nonnote_insn (insn)
2535 rtx insn;
2537 while (insn)
2539 insn = NEXT_INSN (insn);
2540 if (insn == 0 || GET_CODE (insn) != NOTE)
2541 break;
2544 return insn;
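/* Illustrative walk (not original code): visit every non-note insn of the
   current function; handle_insn is a hypothetical callee.  */
#if 0
static void
example_walk ()
{
  rtx insn;

  for (insn = get_insns (); insn; insn = next_nonnote_insn (insn))
    if (GET_CODE (insn) != NOTE)	/* the very first insn may be a note */
      handle_insn (insn);		/* hypothetical */
}
#endif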
2547 /* Return the previous insn before INSN that is not a NOTE. This routine does
2548 not look inside SEQUENCEs. */
2551 prev_nonnote_insn (insn)
2552 rtx insn;
2554 while (insn)
2556 insn = PREV_INSN (insn);
2557 if (insn == 0 || GET_CODE (insn) != NOTE)
2558 break;
2561 return insn;
2564 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
2565 or 0, if there is none. This routine does not look inside
2566 SEQUENCEs. */
2569 next_real_insn (insn)
2570 rtx insn;
2572 while (insn)
2574 insn = NEXT_INSN (insn);
2575 if (insn == 0 || GET_CODE (insn) == INSN
2576 || GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == JUMP_INSN)
2577 break;
2580 return insn;
2583 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
2584 or 0, if there is none. This routine does not look inside
2585 SEQUENCEs. */
2588 prev_real_insn (insn)
2589 rtx insn;
2591 while (insn)
2593 insn = PREV_INSN (insn);
2594 if (insn == 0 || GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
2595 || GET_CODE (insn) == JUMP_INSN)
2596 break;
2599 return insn;
2602 /* Find the next insn after INSN that really does something. This routine
2603 does not look inside SEQUENCEs. Until reload has completed, this is the
2604 same as next_real_insn. */
2607 active_insn_p (insn)
2608 rtx insn;
2610 return (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == JUMP_INSN
2611 || (GET_CODE (insn) == INSN
2612 && (! reload_completed
2613 || (GET_CODE (PATTERN (insn)) != USE
2614 && GET_CODE (PATTERN (insn)) != CLOBBER))));
2618 next_active_insn (insn)
2619 rtx insn;
2621 while (insn)
2623 insn = NEXT_INSN (insn);
2624 if (insn == 0 || active_insn_p (insn))
2625 break;
2628 return insn;
2631 /* Find the last insn before INSN that really does something. This routine
2632 does not look inside SEQUENCEs. Until reload has completed, this is the
2633 same as prev_real_insn. */
2636 prev_active_insn (insn)
2637 rtx insn;
2639 while (insn)
2641 insn = PREV_INSN (insn);
2642 if (insn == 0 || active_insn_p (insn))
2643 break;
2646 return insn;
2649 /* Return the next CODE_LABEL after the insn INSN, or 0 if there is none. */
2652 next_label (insn)
2653 rtx insn;
2655 while (insn)
2657 insn = NEXT_INSN (insn);
2658 if (insn == 0 || GET_CODE (insn) == CODE_LABEL)
2659 break;
2662 return insn;
2665 /* Return the last CODE_LABEL before the insn INSN, or 0 if there is none. */
2668 prev_label (insn)
2669 rtx insn;
2671 while (insn)
2673 insn = PREV_INSN (insn);
2674 if (insn == 0 || GET_CODE (insn) == CODE_LABEL)
2675 break;
2678 return insn;
2681 #ifdef HAVE_cc0
2682 /* INSN uses CC0 and is being moved into a delay slot. Set up REG_CC_SETTER
2683 and REG_CC_USER notes so we can find it. */
2685 void
2686 link_cc0_insns (insn)
2687 rtx insn;
2689 rtx user = next_nonnote_insn (insn);
2691 if (GET_CODE (user) == INSN && GET_CODE (PATTERN (user)) == SEQUENCE)
2692 user = XVECEXP (PATTERN (user), 0, 0);
2694 REG_NOTES (user) = gen_rtx_INSN_LIST (REG_CC_SETTER, insn,
2695 REG_NOTES (user));
2696 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_CC_USER, user, REG_NOTES (insn));
2699 /* Return the next insn that uses CC0 after INSN, which is assumed to
2700 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
2701 applied to the result of this function should yield INSN).
2703 Normally, this is simply the next insn. However, if a REG_CC_USER note
2704 is present, it contains the insn that uses CC0.
2706 Return 0 if we can't find the insn. */
2709 next_cc0_user (insn)
2710 rtx insn;
2712 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
2714 if (note)
2715 return XEXP (note, 0);
2717 insn = next_nonnote_insn (insn);
2718 if (insn && GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
2719 insn = XVECEXP (PATTERN (insn), 0, 0);
2721 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
2722 return insn;
2724 return 0;
2727 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
2728 note, it is the previous insn. */
2731 prev_cc0_setter (insn)
2732 rtx insn;
2734 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
2736 if (note)
2737 return XEXP (note, 0);
2739 insn = prev_nonnote_insn (insn);
2740 if (! sets_cc0_p (PATTERN (insn)))
2741 abort ();
2743 return insn;
2745 #endif
2747 /* Increment the label uses for all labels present in rtx. */
2749 static void
2750 mark_label_nuses (x)
2751 rtx x;
2753 enum rtx_code code;
2754 int i, j;
2755 const char *fmt;
2757 code = GET_CODE (x);
2758 if (code == LABEL_REF)
2759 LABEL_NUSES (XEXP (x, 0))++;
2761 fmt = GET_RTX_FORMAT (code);
2762 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2764 if (fmt[i] == 'e')
2765 mark_label_nuses (XEXP (x, i));
2766 else if (fmt[i] == 'E')
2767 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2768 mark_label_nuses (XVECEXP (x, i, j));
2773 /* Try splitting insns that can be split for better scheduling.
2774 PAT is the pattern which might split.
2775 TRIAL is the insn providing PAT.
2776 LAST is non-zero if we should return the last insn of the sequence produced.
2778 If this routine succeeds in splitting, it returns the first or last
2779 replacement insn depending on the value of LAST. Otherwise, it
2780 returns TRIAL. If the insn to be returned can be split, it will be. */
2783 try_split (pat, trial, last)
2784 rtx pat, trial;
2785 int last;
2787 rtx before = PREV_INSN (trial);
2788 rtx after = NEXT_INSN (trial);
2789 int has_barrier = 0;
2790 rtx tem;
2791 rtx note, seq;
2792 int probability;
2794 if (any_condjump_p (trial)
2795 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
2796 split_branch_probability = INTVAL (XEXP (note, 0));
2797 probability = split_branch_probability;
2799 seq = split_insns (pat, trial);
2801 split_branch_probability = -1;
2803 /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
2804 We may need to handle this specially. */
2805 if (after && GET_CODE (after) == BARRIER)
2807 has_barrier = 1;
2808 after = NEXT_INSN (after);
2811 if (seq)
2813 /* SEQ can either be a SEQUENCE or the pattern of a single insn.
2814 The latter case will normally arise only when being done so that
2815 it, in turn, will be split (SFmode on the 29k is an example). */
2816 if (GET_CODE (seq) == SEQUENCE)
2818 int i, njumps = 0;
2820 /* Avoid infinite loop if any insn of the result matches
2821 the original pattern. */
2822 for (i = 0; i < XVECLEN (seq, 0); i++)
2823 if (GET_CODE (XVECEXP (seq, 0, i)) == INSN
2824 && rtx_equal_p (PATTERN (XVECEXP (seq, 0, i)), pat))
2825 return trial;
2827 /* Mark labels. */
2828 for (i = XVECLEN (seq, 0) - 1; i >= 0; i--)
2829 if (GET_CODE (XVECEXP (seq, 0, i)) == JUMP_INSN)
2831 rtx insn = XVECEXP (seq, 0, i);
2832 mark_jump_label (PATTERN (insn),
2833 XVECEXP (seq, 0, i), 0);
2834 njumps++;
2835 if (probability != -1
2836 && any_condjump_p (insn)
2837 && !find_reg_note (insn, REG_BR_PROB, 0))
2839 /* We can preserve the REG_BR_PROB notes only if exactly
2840 one jump is created, otherwise the machine description
2841 is responsible for this step using the
2842 split_branch_probability variable. */
2843 if (njumps != 1)
2844 abort ();
2845 REG_NOTES (insn)
2846 = gen_rtx_EXPR_LIST (REG_BR_PROB,
2847 GEN_INT (probability),
2848 REG_NOTES (insn));
2852 /* If we are splitting a CALL_INSN, look for the CALL_INSN
2853 in SEQ and copy our CALL_INSN_FUNCTION_USAGE to it. */
2854 if (GET_CODE (trial) == CALL_INSN)
2855 for (i = XVECLEN (seq, 0) - 1; i >= 0; i--)
2856 if (GET_CODE (XVECEXP (seq, 0, i)) == CALL_INSN)
2857 CALL_INSN_FUNCTION_USAGE (XVECEXP (seq, 0, i))
2858 = CALL_INSN_FUNCTION_USAGE (trial);
2860 /* Copy notes, particularly those related to the CFG. */
2861 for (note = REG_NOTES (trial); note ; note = XEXP (note, 1))
2863 switch (REG_NOTE_KIND (note))
2865 case REG_EH_REGION:
2866 for (i = XVECLEN (seq, 0) - 1; i >= 0; i--)
2868 rtx insn = XVECEXP (seq, 0, i);
2869 if (GET_CODE (insn) == CALL_INSN
2870 || (flag_non_call_exceptions
2871 && may_trap_p (PATTERN (insn))))
2872 REG_NOTES (insn)
2873 = gen_rtx_EXPR_LIST (REG_EH_REGION,
2874 XEXP (note, 0),
2875 REG_NOTES (insn));
2877 break;
2879 case REG_NORETURN:
2880 case REG_SETJMP:
2881 case REG_ALWAYS_RETURN:
2882 for (i = XVECLEN (seq, 0) - 1; i >= 0; i--)
2884 rtx insn = XVECEXP (seq, 0, i);
2885 if (GET_CODE (insn) == CALL_INSN)
2886 REG_NOTES (insn)
2887 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
2888 XEXP (note, 0),
2889 REG_NOTES (insn));
2891 break;
2893 case REG_NON_LOCAL_GOTO:
2894 for (i = XVECLEN (seq, 0) - 1; i >= 0; i--)
2896 rtx insn = XVECEXP (seq, 0, i);
2897 if (GET_CODE (insn) == JUMP_INSN)
2898 REG_NOTES (insn)
2899 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
2900 XEXP (note, 0),
2901 REG_NOTES (insn));
2903 break;
2905 default:
2906 break;
2910 /* If there are LABELS inside the split insns, increment the
2911 usage count so we don't delete the label. */
2912 if (GET_CODE (trial) == INSN)
2913 for (i = XVECLEN (seq, 0) - 1; i >= 0; i--)
2914 if (GET_CODE (XVECEXP (seq, 0, i)) == INSN)
2915 mark_label_nuses (PATTERN (XVECEXP (seq, 0, i)));
2917 tem = emit_insn_after (seq, trial);
2919 delete_related_insns (trial);
2920 if (has_barrier)
2921 emit_barrier_after (tem);
2923 /* Recursively call try_split for each new insn created; by the
2924 time control returns here that insn will be fully split, so
2925 set LAST and continue from the insn after the one returned.
2926 We can't use next_active_insn here since AFTER may be a note.
2927 Ignore deleted insns, which can occur if not optimizing. */
2928 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
2929 if (! INSN_DELETED_P (tem) && INSN_P (tem))
2930 tem = try_split (PATTERN (tem), tem, 1);
2932 /* Avoid infinite loop if the result matches the original pattern. */
2933 else if (rtx_equal_p (seq, pat))
2934 return trial;
2935 else
2937 PATTERN (trial) = seq;
2938 INSN_CODE (trial) = -1;
2939 try_split (seq, trial, last);
2942 /* Return either the first or the last insn, depending on which was
2943 requested. */
2944 return last
2945 ? (after ? PREV_INSN (after) : last_insn)
2946 : NEXT_INSN (before);
2949 return trial;
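/* Sketch of a caller (illustrative, not original code; the real split
   passes live elsewhere, e.g. split_all_insns in recog.c):  */
#if 0
static void
example_split_all ()
{
  rtx insn;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      /* LAST = 1 so the walk resumes after the final replacement.  */
      insn = try_split (PATTERN (insn), insn, 1);
}
#endif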
2952 /* Make and return an INSN rtx, initializing all its slots.
2953 Store PATTERN in the pattern slots. */
2956 make_insn_raw (pattern)
2957 rtx pattern;
2959 rtx insn;
2961 insn = rtx_alloc (INSN);
2963 INSN_UID (insn) = cur_insn_uid++;
2964 PATTERN (insn) = pattern;
2965 INSN_CODE (insn) = -1;
2966 LOG_LINKS (insn) = NULL;
2967 REG_NOTES (insn) = NULL;
2969 #ifdef ENABLE_RTL_CHECKING
2970 if (insn
2971 && INSN_P (insn)
2972 && (returnjump_p (insn)
2973 || (GET_CODE (insn) == SET
2974 && SET_DEST (insn) == pc_rtx)))
2976 warning ("ICE: emit_insn used where emit_jump_insn needed:\n");
2977 debug_rtx (insn);
2979 #endif
2981 return insn;
2984 /* Like `make_insn' but make a JUMP_INSN instead of an insn. */
2986 static rtx
2987 make_jump_insn_raw (pattern)
2988 rtx pattern;
2990 rtx insn;
2992 insn = rtx_alloc (JUMP_INSN);
2993 INSN_UID (insn) = cur_insn_uid++;
2995 PATTERN (insn) = pattern;
2996 INSN_CODE (insn) = -1;
2997 LOG_LINKS (insn) = NULL;
2998 REG_NOTES (insn) = NULL;
2999 JUMP_LABEL (insn) = NULL;
3001 return insn;
3004 /* Like `make_insn' but make a CALL_INSN instead of an insn. */
3006 static rtx
3007 make_call_insn_raw (pattern)
3008 rtx pattern;
3010 rtx insn;
3012 insn = rtx_alloc (CALL_INSN);
3013 INSN_UID (insn) = cur_insn_uid++;
3015 PATTERN (insn) = pattern;
3016 INSN_CODE (insn) = -1;
3017 LOG_LINKS (insn) = NULL;
3018 REG_NOTES (insn) = NULL;
3019 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
3021 return insn;
3024 /* Add INSN to the end of the doubly-linked list.
3025 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
3027 void
3028 add_insn (insn)
3029 rtx insn;
3031 PREV_INSN (insn) = last_insn;
3032 NEXT_INSN (insn) = 0;
3034 if (NULL != last_insn)
3035 NEXT_INSN (last_insn) = insn;
3037 if (NULL == first_insn)
3038 first_insn = insn;
3040 last_insn = insn;
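/* Illustrative invariant check (not original code): after add_insn,
   INSN is the tail of the doubly-linked chain.  */
#if 0
static void
example_check_tail (insn)
     rtx insn;	/* the insn just passed to add_insn */
{
  if (last_insn != insn
      || NEXT_INSN (insn) != 0
      || (PREV_INSN (insn) != 0 && NEXT_INSN (PREV_INSN (insn)) != insn))
    abort ();
}
#endif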
3043 /* Add INSN into the doubly-linked list after insn AFTER. This and
3044 the next should be the only functions called to insert an insn once
3045 delay slots have been filled since only they know how to update a
3046 SEQUENCE. */
3048 void
3049 add_insn_after (insn, after)
3050 rtx insn, after;
3052 rtx next = NEXT_INSN (after);
3053 basic_block bb;
3055 if (optimize && INSN_DELETED_P (after))
3056 abort ();
3058 NEXT_INSN (insn) = next;
3059 PREV_INSN (insn) = after;
3061 if (next)
3063 PREV_INSN (next) = insn;
3064 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
3065 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
3067 else if (last_insn == after)
3068 last_insn = insn;
3069 else
3071 struct sequence_stack *stack = seq_stack;
3072 /* Scan all pending sequences too. */
3073 for (; stack; stack = stack->next)
3074 if (after == stack->last)
3076 stack->last = insn;
3077 break;
3080 if (stack == 0)
3081 abort ();
3084 if (basic_block_for_insn
3085 && (unsigned int)INSN_UID (after) < basic_block_for_insn->num_elements
3086 && (bb = BLOCK_FOR_INSN (after)))
3088 set_block_for_insn (insn, bb);
3089 /* Should not happen as first in the BB is always
3090 either a NOTE or a LABEL. */
3091 if (bb->end == after
3092 /* Avoid clobbering of structure when creating new BB. */
3093 && GET_CODE (insn) != BARRIER
3094 && (GET_CODE (insn) != NOTE
3095 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
3096 bb->end = insn;
3099 NEXT_INSN (after) = insn;
3100 if (GET_CODE (after) == INSN && GET_CODE (PATTERN (after)) == SEQUENCE)
3102 rtx sequence = PATTERN (after);
3103 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3107 /* Add INSN into the doubly-linked list before insn BEFORE. This and
3108 the previous should be the only functions called to insert an insn once
3109 delay slots have been filled since only they know how to update a
3110 SEQUENCE. */
3112 void
3113 add_insn_before (insn, before)
3114 rtx insn, before;
3116 rtx prev = PREV_INSN (before);
3117 basic_block bb;
3119 if (optimize && INSN_DELETED_P (before))
3120 abort ();
3122 PREV_INSN (insn) = prev;
3123 NEXT_INSN (insn) = before;
3125 if (prev)
3127 NEXT_INSN (prev) = insn;
3128 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
3130 rtx sequence = PATTERN (prev);
3131 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3134 else if (first_insn == before)
3135 first_insn = insn;
3136 else
3138 struct sequence_stack *stack = seq_stack;
3139 /* Scan all pending sequences too. */
3140 for (; stack; stack = stack->next)
3141 if (before == stack->first)
3143 stack->first = insn;
3144 break;
3147 if (stack == 0)
3148 abort ();
3151 if (basic_block_for_insn
3152 && (unsigned int)INSN_UID (before) < basic_block_for_insn->num_elements
3153 && (bb = BLOCK_FOR_INSN (before)))
3155 set_block_for_insn (insn, bb);
3156 /* Should not happen as first in the BB is always
3157 either a NOTE or a LABEL. */
3158 if (bb->head == insn
3159 /* Avoid clobbering of structure when creating new BB. */
3160 && GET_CODE (insn) != BARRIER
3161 && (GET_CODE (insn) != NOTE
3162 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
3163 abort ();
3166 PREV_INSN (before) = insn;
3167 if (GET_CODE (before) == INSN && GET_CODE (PATTERN (before)) == SEQUENCE)
3168 PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
3171 /* Remove an insn from its doubly-linked list. This function knows how
3172 to handle sequences. */
3173 void
3174 remove_insn (insn)
3175 rtx insn;
3177 rtx next = NEXT_INSN (insn);
3178 rtx prev = PREV_INSN (insn);
3179 basic_block bb;
3181 if (prev)
3183 NEXT_INSN (prev) = next;
3184 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
3186 rtx sequence = PATTERN (prev);
3187 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
3190 else if (first_insn == insn)
3191 first_insn = next;
3192 else
3194 struct sequence_stack *stack = seq_stack;
3195 /* Scan all pending sequences too. */
3196 for (; stack; stack = stack->next)
3197 if (insn == stack->first)
3199 stack->first = next;
3200 break;
3203 if (stack == 0)
3204 abort ();
3207 if (next)
3209 PREV_INSN (next) = prev;
3210 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
3211 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
3213 else if (last_insn == insn)
3214 last_insn = prev;
3215 else
3217 struct sequence_stack *stack = seq_stack;
3218 /* Scan all pending sequences too. */
3219 for (; stack; stack = stack->next)
3220 if (insn == stack->last)
3222 stack->last = prev;
3223 break;
3226 if (stack == 0)
3227 abort ();
3229 if (basic_block_for_insn
3230 && (unsigned int)INSN_UID (insn) < basic_block_for_insn->num_elements
3231 && (bb = BLOCK_FOR_INSN (insn)))
3233 if (bb->head == insn)
3235 /* Never ever delete the basic block note without deleting the whole basic
3236 block. */
3237 if (GET_CODE (insn) == NOTE)
3238 abort ();
3239 bb->head = next;
3241 if (bb->end == insn)
3242 bb->end = prev;
3246 /* Delete all insns made since FROM.
3247 FROM becomes the new last instruction. */
3249 void
3250 delete_insns_since (from)
3251 rtx from;
3253 if (from == 0)
3254 first_insn = 0;
3255 else
3256 NEXT_INSN (from) = 0;
3257 last_insn = from;
3260 /* This function is deprecated, please use sequences instead.
3262 Move a consecutive bunch of insns to a different place in the chain.
3263 The insns to be moved are those between FROM and TO.
3264 They are moved to a new position after the insn AFTER.
3265 AFTER must not be FROM or TO or any insn in between.
3267 This function does not know about SEQUENCEs and hence should not be
3268 called after delay-slot filling has been done. */
3270 void
3271 reorder_insns_nobb (from, to, after)
3272 rtx from, to, after;
3274 /* Splice this bunch out of where it is now. */
3275 if (PREV_INSN (from))
3276 NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
3277 if (NEXT_INSN (to))
3278 PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
3279 if (last_insn == to)
3280 last_insn = PREV_INSN (from);
3281 if (first_insn == from)
3282 first_insn = NEXT_INSN (to);
3284 /* Make the new neighbors point to it and it to them. */
3285 if (NEXT_INSN (after))
3286 PREV_INSN (NEXT_INSN (after)) = to;
3288 NEXT_INSN (to) = NEXT_INSN (after);
3289 PREV_INSN (from) = after;
3290 NEXT_INSN (after) = from;
3291 if (after == last_insn)
3292 last_insn = to;
3295 /* Same as function above, but take care to update BB boundaries. */
3296 void
3297 reorder_insns (from, to, after)
3298 rtx from, to, after;
3300 rtx prev = PREV_INSN (from);
3301 basic_block bb, bb2;
3303 reorder_insns_nobb (from, to, after);
3305 if (basic_block_for_insn
3306 && (unsigned int)INSN_UID (after) < basic_block_for_insn->num_elements
3307 && (bb = BLOCK_FOR_INSN (after)))
3309 rtx x;
3311 if (basic_block_for_insn
3312 && (unsigned int)INSN_UID (from) < basic_block_for_insn->num_elements
3313 && (bb2 = BLOCK_FOR_INSN (from)))
3315 if (bb2->end == to)
3316 bb2->end = prev;
3319 if (bb->end == after)
3320 bb->end = to;
3322 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
3323 set_block_for_insn (x, bb);
3327 /* Return the line note insn preceding INSN. */
3329 static rtx
3330 find_line_note (insn)
3331 rtx insn;
3333 if (no_line_numbers)
3334 return 0;
3336 for (; insn; insn = PREV_INSN (insn))
3337 if (GET_CODE (insn) == NOTE
3338 && NOTE_LINE_NUMBER (insn) >= 0)
3339 break;
3341 return insn;
3344 /* Like reorder_insns, but inserts line notes to preserve the line numbers
3345 of the moved insns when debugging. This may insert a note between AFTER
3346 and FROM, and another one after TO. */
3348 void
3349 reorder_insns_with_line_notes (from, to, after)
3350 rtx from, to, after;
3352 rtx from_line = find_line_note (from);
3353 rtx after_line = find_line_note (after);
3355 reorder_insns (from, to, after);
3357 if (from_line == after_line)
3358 return;
3360 if (from_line)
3361 emit_line_note_after (NOTE_SOURCE_FILE (from_line),
3362 NOTE_LINE_NUMBER (from_line),
3363 after);
3364 if (after_line)
3365 emit_line_note_after (NOTE_SOURCE_FILE (after_line),
3366 NOTE_LINE_NUMBER (after_line),
3367 to);
3370 /* Remove unnecessary notes from the instruction stream. */
3372 void
3373 remove_unnecessary_notes ()
3375 rtx block_stack = NULL_RTX;
3376 rtx eh_stack = NULL_RTX;
3377 rtx insn;
3378 rtx next;
3379 rtx tmp;
3381 /* We must not remove the first instruction in the function because
3382 the compiler depends on the first instruction being a note. */
3383 for (insn = NEXT_INSN (get_insns ()); insn; insn = next)
3385 /* Remember what's next. */
3386 next = NEXT_INSN (insn);
3388 /* We're only interested in notes. */
3389 if (GET_CODE (insn) != NOTE)
3390 continue;
3392 switch (NOTE_LINE_NUMBER (insn))
3394 case NOTE_INSN_DELETED:
3395 remove_insn (insn);
3396 break;
3398 case NOTE_INSN_EH_REGION_BEG:
3399 eh_stack = alloc_INSN_LIST (insn, eh_stack);
3400 break;
3402 case NOTE_INSN_EH_REGION_END:
3403 /* Too many end notes. */
3404 if (eh_stack == NULL_RTX)
3405 abort ();
3406 /* Mismatched nesting. */
3407 if (NOTE_EH_HANDLER (XEXP (eh_stack, 0)) != NOTE_EH_HANDLER (insn))
3408 abort ();
3409 tmp = eh_stack;
3410 eh_stack = XEXP (eh_stack, 1);
3411 free_INSN_LIST_node (tmp);
3412 break;
3414 case NOTE_INSN_BLOCK_BEG:
3415 /* By now, all notes indicating lexical blocks should have
3416 NOTE_BLOCK filled in. */
3417 if (NOTE_BLOCK (insn) == NULL_TREE)
3418 abort ();
3419 block_stack = alloc_INSN_LIST (insn, block_stack);
3420 break;
3422 case NOTE_INSN_BLOCK_END:
3423 /* Too many end notes. */
3424 if (block_stack == NULL_RTX)
3425 abort ();
3426 /* Mismatched nesting. */
3427 if (NOTE_BLOCK (XEXP (block_stack, 0)) != NOTE_BLOCK (insn))
3428 abort ();
3429 tmp = block_stack;
3430 block_stack = XEXP (block_stack, 1);
3431 free_INSN_LIST_node (tmp);
3433 /* Scan back to see if there are any non-note instructions
3434 between INSN and the beginning of this block. If not,
3435 then there is no PC range in the generated code that will
3436 actually be in this block, so there's no point in
3437 remembering the existence of the block. */
3438 for (tmp = PREV_INSN (insn); tmp ; tmp = PREV_INSN (tmp))
3440 /* This block contains a real instruction. Note that we
3441 don't include labels; if the only thing in the block
3442 is a label, then there are still no PC values that
3443 lie within the block. */
3444 if (INSN_P (tmp))
3445 break;
3447 /* We're only interested in NOTEs. */
3448 if (GET_CODE (tmp) != NOTE)
3449 continue;
3451 if (NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BLOCK_BEG)
3453 /* We just verified that this BLOCK matches us with
3454 the block_stack check above. Never delete the
3455 BLOCK for the outermost scope of the function; we
3456 can refer to names from that scope even if the
3457 block notes are messed up. */
3458 if (! is_body_block (NOTE_BLOCK (insn))
3459 && (*debug_hooks->ignore_block) (NOTE_BLOCK (insn)))
3461 remove_insn (tmp);
3462 remove_insn (insn);
3464 break;
3466 else if (NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BLOCK_END)
3467 /* There's a nested block. We need to leave the
3468 current block in place since otherwise the debugger
3469 wouldn't be able to show symbols from our block in
3470 the nested block. */
3471 break;
3476 /* Too many begin notes. */
3477 if (block_stack || eh_stack)
3478 abort ();
3482 /* Emit an insn of given code and pattern
3483 at a specified place within the doubly-linked list. */
3485 /* Make an instruction with body PATTERN
3486 and output it before the instruction BEFORE. */
3489 emit_insn_before (pattern, before)
3490 rtx pattern, before;
3492 rtx insn = before;
3494 if (GET_CODE (pattern) == SEQUENCE)
3496 int i;
3498 for (i = 0; i < XVECLEN (pattern, 0); i++)
3500 insn = XVECEXP (pattern, 0, i);
3501 add_insn_before (insn, before);
3504 else
3506 insn = make_insn_raw (pattern);
3507 add_insn_before (insn, before);
3510 return insn;
3513 /* Make an instruction with body PATTERN and code JUMP_INSN
3514 and output it before the instruction BEFORE. */
3517 emit_jump_insn_before (pattern, before)
3518 rtx pattern, before;
3520 rtx insn;
3522 if (GET_CODE (pattern) == SEQUENCE)
3523 insn = emit_insn_before (pattern, before);
3524 else
3526 insn = make_jump_insn_raw (pattern);
3527 add_insn_before (insn, before);
3530 return insn;
3533 /* Make an instruction with body PATTERN and code CALL_INSN
3534 and output it before the instruction BEFORE. */
3537 emit_call_insn_before (pattern, before)
3538 rtx pattern, before;
3540 rtx insn;
3542 if (GET_CODE (pattern) == SEQUENCE)
3543 insn = emit_insn_before (pattern, before);
3544 else
3546 insn = make_call_insn_raw (pattern);
3547 add_insn_before (insn, before);
3548 PUT_CODE (insn, CALL_INSN);
3551 return insn;
3554 /* Make an insn of code BARRIER
3555 and output it before the insn BEFORE. */
3558 emit_barrier_before (before)
3559 rtx before;
3561 rtx insn = rtx_alloc (BARRIER);
3563 INSN_UID (insn) = cur_insn_uid++;
3565 add_insn_before (insn, before);
3566 return insn;
3569 /* Emit the label LABEL before the insn BEFORE. */
3572 emit_label_before (label, before)
3573 rtx label, before;
3575 /* This can be called twice for the same label as a result of the
3576 confusion that follows a syntax error! So make it harmless. */
3577 if (INSN_UID (label) == 0)
3579 INSN_UID (label) = cur_insn_uid++;
3580 add_insn_before (label, before);
3583 return label;
3586 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
3589 emit_note_before (subtype, before)
3590 int subtype;
3591 rtx before;
3593 rtx note = rtx_alloc (NOTE);
3594 INSN_UID (note) = cur_insn_uid++;
3595 NOTE_SOURCE_FILE (note) = 0;
3596 NOTE_LINE_NUMBER (note) = subtype;
3598 add_insn_before (note, before);
3599 return note;
3602 /* Make an insn of code INSN with body PATTERN
3603 and output it after the insn AFTER. */
3606 emit_insn_after (pattern, after)
3607 rtx pattern, after;
3609 rtx insn = after;
3611 if (GET_CODE (pattern) == SEQUENCE)
3613 int i;
3615 for (i = 0; i < XVECLEN (pattern, 0); i++)
3617 insn = XVECEXP (pattern, 0, i);
3618 add_insn_after (insn, after);
3619 after = insn;
3622 else
3624 insn = make_insn_raw (pattern);
3625 add_insn_after (insn, after);
3628 return insn;
3631 /* Similar to emit_insn_after, except that line notes are to be inserted so
3632 as to act as if this insn were at FROM. */
3634 void
3635 emit_insn_after_with_line_notes (pattern, after, from)
3636 rtx pattern, after, from;
3638 rtx from_line = find_line_note (from);
3639 rtx after_line = find_line_note (after);
3640 rtx insn = emit_insn_after (pattern, after);
3642 if (from_line)
3643 emit_line_note_after (NOTE_SOURCE_FILE (from_line),
3644 NOTE_LINE_NUMBER (from_line),
3645 after);
3647 if (after_line)
3648 emit_line_note_after (NOTE_SOURCE_FILE (after_line),
3649 NOTE_LINE_NUMBER (after_line),
3650 insn);
3653 /* Make an insn of code JUMP_INSN with body PATTERN
3654 and output it after the insn AFTER. */
3657 emit_jump_insn_after (pattern, after)
3658 rtx pattern, after;
3660 rtx insn;
3662 if (GET_CODE (pattern) == SEQUENCE)
3663 insn = emit_insn_after (pattern, after);
3664 else
3666 insn = make_jump_insn_raw (pattern);
3667 add_insn_after (insn, after);
3670 return insn;
3673 /* Make an insn of code BARRIER
3674 and output it after the insn AFTER. */
3677 emit_barrier_after (after)
3678 rtx after;
3680 rtx insn = rtx_alloc (BARRIER);
3682 INSN_UID (insn) = cur_insn_uid++;
3684 add_insn_after (insn, after);
3685 return insn;
3688 /* Emit the label LABEL after the insn AFTER. */
3691 emit_label_after (label, after)
3692 rtx label, after;
3694 /* This can be called twice for the same label
3695 as a result of the confusion that follows a syntax error!
3696 So make it harmless. */
3697 if (INSN_UID (label) == 0)
3699 INSN_UID (label) = cur_insn_uid++;
3700 add_insn_after (label, after);
3703 return label;
3706 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
3709 emit_note_after (subtype, after)
3710 int subtype;
3711 rtx after;
3713 rtx note = rtx_alloc (NOTE);
3714 INSN_UID (note) = cur_insn_uid++;
3715 NOTE_SOURCE_FILE (note) = 0;
3716 NOTE_LINE_NUMBER (note) = subtype;
3717 add_insn_after (note, after);
3718 return note;
3721 /* Emit a line note for FILE and LINE after the insn AFTER. */
3724 emit_line_note_after (file, line, after)
3725 const char *file;
3726 int line;
3727 rtx after;
3729 rtx note;
3731 if (no_line_numbers && line > 0)
3733 cur_insn_uid++;
3734 return 0;
3737 note = rtx_alloc (NOTE);
3738 INSN_UID (note) = cur_insn_uid++;
3739 NOTE_SOURCE_FILE (note) = file;
3740 NOTE_LINE_NUMBER (note) = line;
3741 add_insn_after (note, after);
3742 return note;
3745 /* Make an insn of code INSN with pattern PATTERN
3746 and add it to the end of the doubly-linked list.
3747 If PATTERN is a SEQUENCE, take the elements of it
3748 and emit an insn for each element.
3750 Returns the last insn emitted. */
3753 emit_insn (pattern)
3754 rtx pattern;
3756 rtx insn = last_insn;
3758 if (GET_CODE (pattern) == SEQUENCE)
3760 int i;
3762 for (i = 0; i < XVECLEN (pattern, 0); i++)
3764 insn = XVECEXP (pattern, 0, i);
3765 add_insn (insn);
3768 else
3770 insn = make_insn_raw (pattern);
3771 add_insn (insn);
3774 return insn;
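/* Construction sketch (illustrative, not original code): build a SET and
   add it at the end of the chain.  */
#if 0
static rtx
example_emit_clear ()
{
  rtx dest = gen_reg_rtx (SImode);	/* fresh pseudo */

  return emit_insn (gen_rtx_SET (VOIDmode, dest, const0_rtx));
}
#endif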
3777 /* Emit the insns in a chain starting with INSN.
3778 Return the last insn emitted. */
3781 emit_insns (insn)
3782 rtx insn;
3784 rtx last = 0;
3786 while (insn)
3788 rtx next = NEXT_INSN (insn);
3789 add_insn (insn);
3790 last = insn;
3791 insn = next;
3794 return last;
3797 /* Emit the insns in a chain starting with INSN and place them in front of
3798 the insn BEFORE. Return the last insn emitted. */
3801 emit_insns_before (insn, before)
3802 rtx insn;
3803 rtx before;
3805 rtx last = 0;
3807 while (insn)
3809 rtx next = NEXT_INSN (insn);
3810 add_insn_before (insn, before);
3811 last = insn;
3812 insn = next;
3815 return last;
3818 /* Emit the insns in a chain starting with FIRST and place them in back of
3819 the insn AFTER. Return the last insn emitted. */
3822 emit_insns_after (first, after)
3823 rtx first;
3824 rtx after;
3826 rtx last;
3827 rtx after_after;
3828 basic_block bb;
3830 if (!after)
3831 abort ();
3833 if (!first)
3834 return after;
3836 if (basic_block_for_insn
3837 && (unsigned int)INSN_UID (after) < basic_block_for_insn->num_elements
3838 && (bb = BLOCK_FOR_INSN (after)))
3840 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
3841 set_block_for_insn (last, bb);
3842 set_block_for_insn (last, bb);
3843 if (bb->end == after)
3844 bb->end = last;
3846 else
3847 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
3848 continue;
3850 after_after = NEXT_INSN (after);
3852 NEXT_INSN (after) = first;
3853 PREV_INSN (first) = after;
3854 NEXT_INSN (last) = after_after;
3855 if (after_after)
3856 PREV_INSN (after_after) = last;
3858 if (after == last_insn)
3859 last_insn = last;
3860 return last;
3863 /* Make an insn of code JUMP_INSN with pattern PATTERN
3864 and add it to the end of the doubly-linked list. */
3867 emit_jump_insn (pattern)
3868 rtx pattern;
3870 if (GET_CODE (pattern) == SEQUENCE)
3871 return emit_insn (pattern);
3872 else
3874 rtx insn = make_jump_insn_raw (pattern);
3875 add_insn (insn);
3876 return insn;
3880 /* Make an insn of code CALL_INSN with pattern PATTERN
3881 and add it to the end of the doubly-linked list. */
3884 emit_call_insn (pattern)
3885 rtx pattern;
3887 if (GET_CODE (pattern) == SEQUENCE)
3888 return emit_insn (pattern);
3889 else
3891 rtx insn = make_call_insn_raw (pattern);
3892 add_insn (insn);
3893 PUT_CODE (insn, CALL_INSN);
3894 return insn;
3898 /* Add the label LABEL to the end of the doubly-linked list. */
3901 emit_label (label)
3902 rtx label;
3904 /* This can be called twice for the same label
3905 as a result of the confusion that follows a syntax error!
3906 So make it harmless. */
3907 if (INSN_UID (label) == 0)
3909 INSN_UID (label) = cur_insn_uid++;
3910 add_insn (label);
3912 return label;
3915 /* Make an insn of code BARRIER
3916 and add it to the end of the doubly-linked list. */
3919 emit_barrier ()
3921 rtx barrier = rtx_alloc (BARRIER);
3922 INSN_UID (barrier) = cur_insn_uid++;
3923 add_insn (barrier);
3924 return barrier;
3927 /* Make an insn of code NOTE
3928 with data-fields specified by FILE and LINE
3929 and add it to the end of the doubly-linked list,
3930 but only if line-numbers are desired for debugging info. */
3933 emit_line_note (file, line)
3934 const char *file;
3935 int line;
3937 set_file_and_line_for_stmt (file, line);
3939 #if 0
3940 if (no_line_numbers)
3941 return 0;
3942 #endif
3944 return emit_note (file, line);
3947 /* Make an insn of code NOTE
3948 with data-fields specified by FILE and LINE
3949 and add it to the end of the doubly-linked list.
3950 If it is a line-number NOTE, omit it if it matches the previous one. */
3953 emit_note (file, line)
3954 const char *file;
3955 int line;
3957 rtx note;
3959 if (line > 0)
3961 if (file && last_filename && !strcmp (file, last_filename)
3962 && line == last_linenum)
3963 return 0;
3964 last_filename = file;
3965 last_linenum = line;
3968 if (no_line_numbers && line > 0)
3970 cur_insn_uid++;
3971 return 0;
3974 note = rtx_alloc (NOTE);
3975 INSN_UID (note) = cur_insn_uid++;
3976 NOTE_SOURCE_FILE (note) = file;
3977 NOTE_LINE_NUMBER (note) = line;
3978 add_insn (note);
3979 return note;
3982 /* Emit a NOTE, and don't omit it even if LINE is the previous note. */
3985 emit_line_note_force (file, line)
3986 const char *file;
3987 int line;
3989 last_linenum = -1;
3990 return emit_line_note (file, line);
3993 /* Cause next statement to emit a line note even if the line number
3994 has not changed. This is used at the beginning of a function. */
3996 void
3997 force_next_line_note ()
3999 last_linenum = -1;
4002 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
4003 note of this type already exists, remove it first. */
4006 set_unique_reg_note (insn, kind, datum)
4007 rtx insn;
4008 enum reg_note kind;
4009 rtx datum;
4011 rtx note = find_reg_note (insn, kind, NULL_RTX);
4013 /* Don't add ASM_OPERANDS REG_EQUAL/REG_EQUIV notes.
4014 It serves no useful purpose and breaks eliminate_regs. */
4015 if ((kind == REG_EQUAL || kind == REG_EQUIV)
4016 && GET_CODE (datum) == ASM_OPERANDS)
4017 return NULL_RTX;
4019 if (note)
4021 XEXP (note, 0) = datum;
4022 return note;
4025 REG_NOTES (insn) = gen_rtx_EXPR_LIST (kind, datum, REG_NOTES (insn));
4026 return REG_NOTES (insn);
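/* Usage sketch (illustrative, not original code): record that INSN
   computes X * 4, replacing any REG_EQUAL note already present.  */
#if 0
static void
example_note_mult (insn, x)
     rtx insn, x;
{
  set_unique_reg_note (insn, REG_EQUAL,
		       gen_rtx_MULT (SImode, x, GEN_INT (4)));
}
#endif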
4029 /* Return an indication of which type of insn should have X as a body.
4030 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
4032 enum rtx_code
4033 classify_insn (x)
4034 rtx x;
4036 if (GET_CODE (x) == CODE_LABEL)
4037 return CODE_LABEL;
4038 if (GET_CODE (x) == CALL)
4039 return CALL_INSN;
4040 if (GET_CODE (x) == RETURN)
4041 return JUMP_INSN;
4042 if (GET_CODE (x) == SET)
4044 if (SET_DEST (x) == pc_rtx)
4045 return JUMP_INSN;
4046 else if (GET_CODE (SET_SRC (x)) == CALL)
4047 return CALL_INSN;
4048 else
4049 return INSN;
4051 if (GET_CODE (x) == PARALLEL)
4053 int j;
4054 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
4055 if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
4056 return CALL_INSN;
4057 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
4058 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
4059 return JUMP_INSN;
4060 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
4061 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
4062 return CALL_INSN;
4064 return INSN;
4067 /* Emit the rtl pattern X as an appropriate kind of insn.
4068 If X is a label, it is simply added into the insn chain. */
4071 emit (x)
4072 rtx x;
4074 enum rtx_code code = classify_insn (x);
4076 if (code == CODE_LABEL)
4077 return emit_label (x);
4078 else if (code == INSN)
4079 return emit_insn (x);
4080 else if (code == JUMP_INSN)
4082 rtx insn = emit_jump_insn (x);
4083 if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
4084 return emit_barrier ();
4085 return insn;
4087 else if (code == CALL_INSN)
4088 return emit_call_insn (x);
4089 else
4090 abort ();
4093 /* Begin emitting insns to a sequence which can be packaged in an
4094 RTL_EXPR. If this sequence will contain something that might cause
4095 the compiler to pop arguments to function calls (because those
4096 pops have previously been deferred; see INHIBIT_DEFER_POP for more
4097 details), use do_pending_stack_adjust before calling this function.
4098 That will ensure that the deferred pops are not accidentally
4099 emitted in the middle of this sequence. */
4101 void
4102 start_sequence ()
4104 struct sequence_stack *tem;
4106 tem = (struct sequence_stack *) xmalloc (sizeof (struct sequence_stack));
4108 tem->next = seq_stack;
4109 tem->first = first_insn;
4110 tem->last = last_insn;
4111 tem->sequence_rtl_expr = seq_rtl_expr;
4113 seq_stack = tem;
4115 first_insn = 0;
4116 last_insn = 0;
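/* The canonical pairing with gen_sequence and end_sequence below
   (illustrative, not original code); TEMP and SRC are hypothetical.  */
#if 0
static void
example_splice_before (insn, temp, src)
     rtx insn, temp, src;
{
  rtx seq;

  start_sequence ();
  emit_move_insn (temp, src);	/* emit into the nested chain */
  seq = gen_sequence ();	/* package what was emitted */
  end_sequence ();		/* restore the outer chain */

  emit_insn_before (seq, insn);
}
#endif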
4119 /* Similarly, but indicate that this sequence will be placed in T, an
4120 RTL_EXPR. See the documentation for start_sequence for more
4121 information about how to use this function. */
4123 void
4124 start_sequence_for_rtl_expr (t)
4125 tree t;
4127 start_sequence ();
4129 seq_rtl_expr = t;
4132 /* Set up the insn chain starting with FIRST as the current sequence,
4133 saving the previously current one. See the documentation for
4134 start_sequence for more information about how to use this function. */
4136 void
4137 push_to_sequence (first)
4138 rtx first;
4140 rtx last;
4142 start_sequence ();
4144 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last));
4146 first_insn = first;
4147 last_insn = last;
4150 /* Set up the insn chain from FIRST to LAST as the current sequence. */
4152 void
4153 push_to_full_sequence (first, last)
4154 rtx first, last;
4156 start_sequence ();
4157 first_insn = first;
4158 last_insn = last;
4159 /* We really should have the end of the insn chain here. */
4160 if (last && NEXT_INSN (last))
4161 abort ();
4164 /* Set up the outer-level insn chain
4165 as the current sequence, saving the previously current one. */
4167 void
4168 push_topmost_sequence ()
4170 struct sequence_stack *stack, *top = NULL;
4172 start_sequence ();
4174 for (stack = seq_stack; stack; stack = stack->next)
4175 top = stack;
4177 first_insn = top->first;
4178 last_insn = top->last;
4179 seq_rtl_expr = top->sequence_rtl_expr;
4182 /* After emitting to the outer-level insn chain, update the outer-level
4183 insn chain, and restore the previous saved state. */
4185 void
4186 pop_topmost_sequence ()
4188 struct sequence_stack *stack, *top = NULL;
4190 for (stack = seq_stack; stack; stack = stack->next)
4191 top = stack;
4193 top->first = first_insn;
4194 top->last = last_insn;
4195 /* ??? Why don't we save seq_rtl_expr here? */
4197 end_sequence ();
4200 /* After emitting to a sequence, restore previous saved state.
4202 To get the contents of the sequence just made, you must call
4203 `gen_sequence' *before* calling here.
4205 If the compiler might have deferred popping arguments while
4206 generating this sequence, and this sequence will not be immediately
4207 inserted into the instruction stream, use do_pending_stack_adjust
4208 before calling gen_sequence. That will ensure that the deferred
4209 pops are inserted into this sequence, and not into some random
4210 location in the instruction stream. See INHIBIT_DEFER_POP for more
4211 information about deferred popping of arguments. */
4213 void
4214 end_sequence ()
4216 struct sequence_stack *tem = seq_stack;
4218 first_insn = tem->first;
4219 last_insn = tem->last;
4220 seq_rtl_expr = tem->sequence_rtl_expr;
4221 seq_stack = tem->next;
4223 free (tem);
4226 /* This works like end_sequence, but records the old sequence in FIRST
4227 and LAST. */
4229 void
4230 end_full_sequence (first, last)
4231 rtx *first, *last;
4233 *first = first_insn;
4234 *last = last_insn;
4235 end_sequence ();
4238 /* Return 1 if currently emitting into a sequence. */
4241 in_sequence_p ()
4243 return seq_stack != 0;
4246 /* Generate a SEQUENCE rtx containing the insns already emitted
4247 to the current sequence.
4249 This is how the gen_... function from a DEFINE_EXPAND
4250 constructs the SEQUENCE that it returns. */
4253 gen_sequence ()
4255 rtx result;
4256 rtx tem;
4257 int i;
4258 int len;
4260 /* Count the insns in the chain. */
4261 len = 0;
4262 for (tem = first_insn; tem; tem = NEXT_INSN (tem))
4263 len++;
4265 /* If only one insn, return it rather than a SEQUENCE.
4266 (Now that we cache SEQUENCE expressions, it isn't worth special-casing
4267 the case of an empty list.)
4268 We only return the pattern of an insn if its code is INSN and it
4269 has no notes. This ensures that no information gets lost. */
4270 if (len == 1
4271 && ! RTX_FRAME_RELATED_P (first_insn)
4272 && GET_CODE (first_insn) == INSN
4273 /* Don't throw away any reg notes. */
4274 && REG_NOTES (first_insn) == 0)
4275 return PATTERN (first_insn);
4277 result = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (len));
4279 for (i = 0, tem = first_insn; tem; tem = NEXT_INSN (tem), i++)
4280 XVECEXP (result, 0, i) = tem;
4282 return result;
4285 /* Put the various virtual registers into REGNO_REG_RTX. */
4287 void
4288 init_virtual_regs (es)
4289 struct emit_status *es;
4291 rtx *ptr = es->x_regno_reg_rtx;
4292 ptr[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
4293 ptr[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
4294 ptr[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
4295 ptr[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
4296 ptr[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
void
clear_emit_caches ()
{
  int i;

  /* Clear the start_sequence/gen_sequence cache.  */
  for (i = 0; i < SEQUENCE_RESULT_SIZE; i++)
    sequence_result[i] = 0;
  free_insn = 0;
}

/* Used by copy_insn_1 to avoid copying SCRATCHes more than once.  */
static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
static int copy_insn_n_scratches;

/* When an insn is being copied by copy_insn_1, this is nonzero if we have
   copied an ASM_OPERANDS.
   In that case, it is the original input-operand vector.  */
static rtvec orig_asm_operands_vector;

/* When an insn is being copied by copy_insn_1, this is nonzero if we have
   copied an ASM_OPERANDS.
   In that case, it is the copied input-operand vector.  */
static rtvec copy_asm_operands_vector;

/* Likewise for the constraints vector.  */
static rtvec orig_asm_constraints_vector;
static rtvec copy_asm_constraints_vector;

/* Recursively create a new copy of an rtx for copy_insn.
   This function differs from copy_rtx in that it handles SCRATCHes and
   ASM_OPERANDs properly.
   Normally, this function is not used directly; use copy_insn as front end.
   However, you could first copy an insn pattern with copy_insn and then use
   this function afterwards to properly copy any REG_NOTEs containing
   SCRATCHes.  */

rtx
copy_insn_1 (orig)
     rtx orig;
{
  rtx copy;
  int i, j;
  RTX_CODE code;
  const char *format_ptr;

  code = GET_CODE (orig);

  switch (code)
    {
    case REG:
    case QUEUED:
    case CONST_INT:
    case CONST_DOUBLE:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case ADDRESSOF:
      return orig;

    case SCRATCH:
      for (i = 0; i < copy_insn_n_scratches; i++)
	if (copy_insn_scratch_in[i] == orig)
	  return copy_insn_scratch_out[i];
      break;

    case CONST:
      /* CONST can be shared if it contains a SYMBOL_REF.  If it contains
	 a LABEL_REF, it isn't sharable.  */
      if (GET_CODE (XEXP (orig, 0)) == PLUS
	  && GET_CODE (XEXP (XEXP (orig, 0), 0)) == SYMBOL_REF
	  && GET_CODE (XEXP (XEXP (orig, 0), 1)) == CONST_INT)
	return orig;
      break;

      /* A MEM with a constant address is not sharable.  The problem is that
	 the constant address may need to be reloaded.  If the mem is shared,
	 then reloading one copy of this mem will cause all copies to appear
	 to have been reloaded.  */

    default:
      break;
    }

  copy = rtx_alloc (code);

  /* Copy the various flags, and other information.  We assume that
     all fields need copying, and then clear the fields that should
     not be copied.  That is the sensible default behavior, and forces
     us to explicitly document why we are *not* copying a flag.  */
  memcpy (copy, orig, sizeof (struct rtx_def) - sizeof (rtunion));

  /* We do not copy the USED flag, which is used as a mark bit during
     walks over the RTL.  */
  copy->used = 0;

  /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs.  */
  if (GET_RTX_CLASS (code) == 'i')
    {
      copy->jump = 0;
      copy->call = 0;
      copy->frame_related = 0;
    }

  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));

  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
    {
      copy->fld[i] = orig->fld[i];
      switch (*format_ptr++)
	{
	case 'e':
	  if (XEXP (orig, i) != NULL)
	    XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
	  break;

	case 'E':
	case 'V':
	  if (XVEC (orig, i) == orig_asm_constraints_vector)
	    XVEC (copy, i) = copy_asm_constraints_vector;
	  else if (XVEC (orig, i) == orig_asm_operands_vector)
	    XVEC (copy, i) = copy_asm_operands_vector;
	  else if (XVEC (orig, i) != NULL)
	    {
	      XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
	      for (j = 0; j < XVECLEN (copy, i); j++)
		XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
	    }
	  break;

	case 't':
	case 'w':
	case 'i':
	case 's':
	case 'S':
	case 'u':
	case '0':
	  /* These are left unchanged.  */
	  break;

	default:
	  abort ();
	}
    }

  if (code == SCRATCH)
    {
      i = copy_insn_n_scratches++;
      if (i >= MAX_RECOG_OPERANDS)
	abort ();
      copy_insn_scratch_in[i] = orig;
      copy_insn_scratch_out[i] = copy;
    }
  else if (code == ASM_OPERANDS)
    {
      orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
      copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
      orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
      copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
    }

  return copy;
}

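/* An illustrative sketch, not part of this file, of the REG_NOTES usage
   described in the comment above copy_insn_1 (INSN is a hypothetical insn):

	rtx pat = copy_insn (PATTERN (insn));
	rtx notes = copy_insn_1 (REG_NOTES (insn));

   copy_insn (below) resets the SCRATCH bookkeeping, so the second call
   reuses the SCRATCH mappings recorded while the pattern was copied.  */
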
/* Create a new copy of an rtx.
   This function differs from copy_rtx in that it handles SCRATCHes and
   ASM_OPERANDs properly.
   INSN doesn't really have to be a full INSN; it could be just the
   pattern.  */

rtx
copy_insn (insn)
     rtx insn;
{
  copy_insn_n_scratches = 0;
  orig_asm_operands_vector = 0;
  orig_asm_constraints_vector = 0;
  copy_asm_operands_vector = 0;
  copy_asm_constraints_vector = 0;
  return copy_insn_1 (insn);
}

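/* A hedged example of why copy_insn matters relative to copy_rtx: when the
   pattern of INSN (hypothetical) contains several ASM_OPERANDS from one
   asm statement, those rtx's share their input and constraint vectors.

	new_pat = copy_insn (PATTERN (insn));

   copy_rtx would duplicate each vector separately and lose that sharing;
   copy_insn_1 above preserves it via the *_asm_*_vector variables.  */
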
/* Initialize data structures and variables in this file
   before generating rtl for each function.  */

void
init_emit ()
{
  struct function *f = cfun;

  f->emit = (struct emit_status *) xmalloc (sizeof (struct emit_status));
  first_insn = NULL;
  last_insn = NULL;
  seq_rtl_expr = NULL;
  cur_insn_uid = 1;
  reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
  last_linenum = 0;
  last_filename = 0;
  first_label_num = label_num;
  last_label_num = 0;
  seq_stack = NULL;

  clear_emit_caches ();

  /* Init the tables that describe all the pseudo regs.  */

  f->emit->regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;

  f->emit->regno_pointer_align
    = (unsigned char *) xcalloc (f->emit->regno_pointer_align_length,
				 sizeof (unsigned char));

  regno_reg_rtx
    = (rtx *) xcalloc (f->emit->regno_pointer_align_length, sizeof (rtx));

  f->emit->regno_decl
    = (tree *) xcalloc (f->emit->regno_pointer_align_length, sizeof (tree));

  /* Put copies of all the virtual register rtx into regno_reg_rtx.  */
  init_virtual_regs (f->emit);

  /* Indicate that the virtual registers and stack locations are
     all pointers.  */
  REG_POINTER (stack_pointer_rtx) = 1;
  REG_POINTER (frame_pointer_rtx) = 1;
  REG_POINTER (hard_frame_pointer_rtx) = 1;
  REG_POINTER (arg_pointer_rtx) = 1;

  REG_POINTER (virtual_incoming_args_rtx) = 1;
  REG_POINTER (virtual_stack_vars_rtx) = 1;
  REG_POINTER (virtual_stack_dynamic_rtx) = 1;
  REG_POINTER (virtual_outgoing_args_rtx) = 1;
  REG_POINTER (virtual_cfa_rtx) = 1;

#ifdef STACK_BOUNDARY
  REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;

  REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
#endif

#ifdef INIT_EXPANDERS
  INIT_EXPANDERS;
#endif
}

/* Mark SS for GC.  */

static void
mark_sequence_stack (ss)
     struct sequence_stack *ss;
{
  while (ss)
    {
      ggc_mark_rtx (ss->first);
      ggc_mark_tree (ss->sequence_rtl_expr);
      ss = ss->next;
    }
}

/* Mark ES for GC.  */

void
mark_emit_status (es)
     struct emit_status *es;
{
  rtx *r;
  tree *t;
  int i;

  if (es == 0)
    return;

  for (i = es->regno_pointer_align_length, r = es->x_regno_reg_rtx,
       t = es->regno_decl;
       i > 0; --i, ++r, ++t)
    {
      ggc_mark_rtx (*r);
      ggc_mark_tree (*t);
    }

  mark_sequence_stack (es->sequence_stack);
  ggc_mark_tree (es->sequence_rtl_expr);
  ggc_mark_rtx (es->x_first_insn);
}

/* Create some permanent unique rtl objects shared between all functions.
   LINE_NUMBERS is nonzero if line numbers are to be generated.  */

void
init_emit_once (line_numbers)
     int line_numbers;
{
  int i;
  enum machine_mode mode;
  enum machine_mode double_mode;

  /* Initialize the CONST_INT and memory attribute hash tables.  */
  const_int_htab = htab_create (37, const_int_htab_hash,
				const_int_htab_eq, NULL);
  ggc_add_deletable_htab (const_int_htab, 0, 0);

  mem_attrs_htab = htab_create (37, mem_attrs_htab_hash,
				mem_attrs_htab_eq, NULL);
  ggc_add_deletable_htab (mem_attrs_htab, 0, mem_attrs_mark);

  no_line_numbers = ! line_numbers;

  /* Compute the word and byte modes.  */

  byte_mode = VOIDmode;
  word_mode = VOIDmode;
  double_mode = VOIDmode;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
	  && byte_mode == VOIDmode)
	byte_mode = mode;

      if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
	  && word_mode == VOIDmode)
	word_mode = mode;
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
	  && double_mode == VOIDmode)
	double_mode = mode;
    }

  ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);

  /* Assign register numbers to the globally defined register rtx.
     This must be done at runtime because the register number field
     is in a union and some compilers can't initialize unions.  */

  pc_rtx = gen_rtx (PC, VOIDmode);
  cc0_rtx = gen_rtx (CC0, VOIDmode);
  stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
  frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
  if (hard_frame_pointer_rtx == 0)
    hard_frame_pointer_rtx = gen_raw_REG (Pmode,
					  HARD_FRAME_POINTER_REGNUM);
  if (arg_pointer_rtx == 0)
    arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
  virtual_incoming_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
  virtual_stack_vars_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
  virtual_stack_dynamic_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
  virtual_outgoing_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
  virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);

  /* These rtx must be roots if GC is enabled.  */
  ggc_add_rtx_root (global_rtl, GR_MAX);

#ifdef INIT_EXPANDERS
  /* This is to initialize {init|mark|free}_machine_status before the first
     call to push_function_context_to.  This is needed by the Chill front
     end which calls push_function_context_to before the first call to
     init_function_start.  */
  INIT_EXPANDERS;
#endif

  /* Create the unique rtx's for certain rtx codes and operand values.  */

  /* Don't use gen_rtx here since gen_rtx in this case
     tries to use these variables.  */
  for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
    const_int_rtx[i + MAX_SAVED_CONST_INT] =
      gen_rtx_raw_CONST_INT (VOIDmode, i);
  ggc_add_rtx_root (const_int_rtx, 2 * MAX_SAVED_CONST_INT + 1);

  if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
      && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
    const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
  else
    const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);

  dconst0 = REAL_VALUE_ATOF ("0", double_mode);
  dconst1 = REAL_VALUE_ATOF ("1", double_mode);
  dconst2 = REAL_VALUE_ATOF ("2", double_mode);
  dconstm1 = REAL_VALUE_ATOF ("-1", double_mode);

  for (i = 0; i <= 2; i++)
    {
      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	{
	  rtx tem = rtx_alloc (CONST_DOUBLE);
	  union real_extract u;

	  /* Zero any holes in a structure.  */
	  memset ((char *) &u, 0, sizeof u);
	  u.d = i == 0 ? dconst0 : i == 1 ? dconst1 : dconst2;

	  /* Avoid trailing garbage in the rtx.  */
	  if (sizeof (u) < sizeof (HOST_WIDE_INT))
	    CONST_DOUBLE_LOW (tem) = 0;
	  if (sizeof (u) < 2 * sizeof (HOST_WIDE_INT))
	    CONST_DOUBLE_HIGH (tem) = 0;

	  memcpy (&CONST_DOUBLE_LOW (tem), &u, sizeof u);
	  CONST_DOUBLE_CHAIN (tem) = NULL_RTX;
	  PUT_MODE (tem, mode);

	  const_tiny_rtx[i][(int) mode] = tem;
	}

      const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] = GEN_INT (i);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] = GEN_INT (i);
    }

  for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
    if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
      const_tiny_rtx[0][i] = const0_rtx;

  const_tiny_rtx[0][(int) BImode] = const0_rtx;
  if (STORE_FLAG_VALUE == 1)
    const_tiny_rtx[1][(int) BImode] = const1_rtx;

  /* For bounded pointers, `&const_tiny_rtx[0][0]' is not the same as
     `(rtx *) const_tiny_rtx'.  The former has bounds that only cover
     `const_tiny_rtx[0]', whereas the latter has bounds that cover all.  */
  ggc_add_rtx_root ((rtx *) const_tiny_rtx, sizeof const_tiny_rtx / sizeof (rtx));
  ggc_add_rtx_root (&const_true_rtx, 1);

#ifdef RETURN_ADDRESS_POINTER_REGNUM
  return_address_pointer_rtx
    = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
#endif

#ifdef STRUCT_VALUE
  struct_value_rtx = STRUCT_VALUE;
#else
  struct_value_rtx = gen_rtx_REG (Pmode, STRUCT_VALUE_REGNUM);
#endif

#ifdef STRUCT_VALUE_INCOMING
  struct_value_incoming_rtx = STRUCT_VALUE_INCOMING;
#else
#ifdef STRUCT_VALUE_INCOMING_REGNUM
  struct_value_incoming_rtx
    = gen_rtx_REG (Pmode, STRUCT_VALUE_INCOMING_REGNUM);
#else
  struct_value_incoming_rtx = struct_value_rtx;
#endif
#endif

#ifdef STATIC_CHAIN_REGNUM
  static_chain_rtx = gen_rtx_REG (Pmode, STATIC_CHAIN_REGNUM);

#ifdef STATIC_CHAIN_INCOMING_REGNUM
  if (STATIC_CHAIN_INCOMING_REGNUM != STATIC_CHAIN_REGNUM)
    static_chain_incoming_rtx
      = gen_rtx_REG (Pmode, STATIC_CHAIN_INCOMING_REGNUM);
  else
#endif
    static_chain_incoming_rtx = static_chain_rtx;
#endif

#ifdef STATIC_CHAIN
  static_chain_rtx = STATIC_CHAIN;

#ifdef STATIC_CHAIN_INCOMING
  static_chain_incoming_rtx = STATIC_CHAIN_INCOMING;
#else
  static_chain_incoming_rtx = static_chain_rtx;
#endif
#endif

  if (PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
    pic_offset_table_rtx = gen_rtx_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);

  ggc_add_rtx_root (&pic_offset_table_rtx, 1);
  ggc_add_rtx_root (&struct_value_rtx, 1);
  ggc_add_rtx_root (&struct_value_incoming_rtx, 1);
  ggc_add_rtx_root (&static_chain_rtx, 1);
  ggc_add_rtx_root (&static_chain_incoming_rtx, 1);
  ggc_add_rtx_root (&return_address_pointer_rtx, 1);
}

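/* For reference, and defined in rtl.h rather than here: the const_tiny_rtx
   table filled in above is what the CONST0_RTX family of macros indexes,
   roughly

	#define CONST0_RTX(MODE)  (const_tiny_rtx[0][(int) (MODE)])

   so that, e.g., CONST0_RTX (DFmode) yields the shared CONST_DOUBLE for
   zero in DFmode.  */
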
/* Query and clear/restore no_line_numbers.  This is used by the
   switch / case handling in stmt.c to give proper line numbers in
   warnings about unreachable code.  */

int
force_line_numbers ()
{
  int old = no_line_numbers;

  no_line_numbers = 0;
  if (old)
    force_next_line_note ();
  return old;
}

void
restore_line_number_status (old_value)
     int old_value;
{
  no_line_numbers = old_value;
}
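
/* A minimal usage sketch of the pairing above, not part of this file,
   in the style of the stmt.c switch handling mentioned earlier:

	int old = force_line_numbers ();
	(... emit code that must carry line-number notes ...)
	restore_line_number_status (old);
*/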