/* Emit RTL for the GNU C-Compiler expander.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/* Middle-to-low level generation of rtx code and insns.

   This file contains the functions `gen_rtx', `gen_reg_rtx'
   and `gen_label_rtx' that are the usual ways of creating rtl
   expressions for most purposes.

   It also has the functions for creating insns and linking
   them in the doubly-linked chain.

   The patterns of the insns are created by machine-dependent
   routines in insn-emit.c, which is generated automatically from
   the machine description.  These routines use `gen_rtx' to make
   the individual rtx's of the pattern; what is machine dependent
   is the kind of rtx's they make and what arguments they use.  */
#include "config.h"
#include "system.h"
#include "toplev.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "hashtab.h"
#include "insn-config.h"
#include "recog.h"
#include "real.h"
#include "obstack.h"
#include "bitmap.h"
#include "basic-block.h"
#include "ggc.h"
#include "debug.h"
/* Commonly used modes.  */

enum machine_mode byte_mode;	/* Mode whose width is BITS_PER_UNIT.  */
enum machine_mode word_mode;	/* Mode whose width is BITS_PER_WORD.  */
enum machine_mode double_mode;	/* Mode whose width is DOUBLE_TYPE_SIZE.  */
enum machine_mode ptr_mode;	/* Mode whose width is POINTER_SIZE.  */

/* This is *not* reset after each function.  It gives each CODE_LABEL
   in the entire compilation a unique label number.  */

static int label_num = 1;

/* Highest label number in current function.
   Zero means use the value of label_num instead.
   This is nonzero only when belatedly compiling an inline function.  */

static int last_label_num;

/* Value label_num had when set_new_first_and_last_label_number was called.
   If label_num has not changed since then, last_label_num is valid.  */

static int base_label_num;

/* Nonzero means do not generate NOTEs for source line numbers.  */

static int no_line_numbers;
/* Commonly used rtx's, so that we only need space for one copy.
   These are initialized once for the entire compilation.
   All of these except perhaps the floating-point CONST_DOUBLEs
   are unique; no other rtx-object will be equal to any of these.  */

rtx global_rtl[GR_MAX];

/* We record floating-point CONST_DOUBLEs in each floating-point mode for
   the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
   record a copy of const[012]_rtx.  */

rtx const_tiny_rtx[3][(int) MAX_MACHINE_MODE];

rtx const_true_rtx;

REAL_VALUE_TYPE dconst0;
REAL_VALUE_TYPE dconst1;
REAL_VALUE_TYPE dconst2;
REAL_VALUE_TYPE dconstm1;

/* All references to the following fixed hard registers go through
   these unique rtl objects.  On machines where the frame-pointer and
   arg-pointer are the same register, they use the same unique object.

   After register allocation, other rtl objects which used to be pseudo-regs
   may be clobbered to refer to the frame-pointer register.
   But references that were originally to the frame-pointer can be
   distinguished from the others because they contain frame_pointer_rtx.

   When to use frame_pointer_rtx and hard_frame_pointer_rtx is a little
   tricky: until register elimination has taken place hard_frame_pointer_rtx
   should be used if it is being set, and frame_pointer_rtx otherwise.  After
   register elimination hard_frame_pointer_rtx should always be used.
   On machines where the two registers are the same (most machines),
   these are the same rtx.

   In an inline procedure, the stack and frame pointer rtxs may not be
   used for anything else.  */
rtx struct_value_rtx;		/* (REG:Pmode STRUCT_VALUE_REGNUM) */
rtx struct_value_incoming_rtx;	/* (REG:Pmode STRUCT_VALUE_INCOMING_REGNUM) */
rtx static_chain_rtx;		/* (REG:Pmode STATIC_CHAIN_REGNUM) */
rtx static_chain_incoming_rtx;	/* (REG:Pmode STATIC_CHAIN_INCOMING_REGNUM) */
rtx pic_offset_table_rtx;	/* (REG:Pmode PIC_OFFSET_TABLE_REGNUM) */

/* This is used to implement __builtin_return_address for some machines.
   See for instance the MIPS port.  */
rtx return_address_pointer_rtx;	/* (REG:Pmode RETURN_ADDRESS_POINTER_REGNUM) */

/* We make one copy of (const_int C) where C is in
   [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
   to save space during the compilation and simplify comparisons of
   integers.  */

rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];

/* A hash table storing CONST_INTs whose absolute value is greater
   than MAX_SAVED_CONST_INT.  */

static htab_t const_int_htab;

/* A hash table storing memory attribute structures.  */
static htab_t mem_attrs_htab;
/* start_sequence and gen_sequence can make a lot of rtx expressions which are
   shortly thrown away.  We use two mechanisms to prevent this waste:

   For sizes up to 5 elements, we keep a SEQUENCE and its associated
   rtvec for use by gen_sequence.  One entry for each size is
   sufficient because most cases are calls to gen_sequence followed by
   immediately emitting the SEQUENCE.  Reuse is safe since emitting a
   sequence is destructive on the insn in it anyway and hence can't be
   redone.

   We do not bother to save this cached data over nested function calls.
   Instead, we just reinitialize them.  */

#define SEQUENCE_RESULT_SIZE 5

static rtx sequence_result[SEQUENCE_RESULT_SIZE];

/* During RTL generation, we also keep a list of free INSN rtl codes.  */
static rtx free_insn;

#define first_insn (cfun->emit->x_first_insn)
#define last_insn (cfun->emit->x_last_insn)
#define cur_insn_uid (cfun->emit->x_cur_insn_uid)
#define last_linenum (cfun->emit->x_last_linenum)
#define last_filename (cfun->emit->x_last_filename)
#define first_label_num (cfun->emit->x_first_label_num)
static rtx make_jump_insn_raw		PARAMS ((rtx));
static rtx make_call_insn_raw		PARAMS ((rtx));
static rtx find_line_note		PARAMS ((rtx));
static void mark_sequence_stack		PARAMS ((struct sequence_stack *));
static rtx change_address_1		PARAMS ((rtx, enum machine_mode, rtx,
						 int));
static void unshare_all_rtl_1		PARAMS ((rtx));
static void unshare_all_decls		PARAMS ((tree));
static void reset_used_decls		PARAMS ((tree));
static void mark_label_nuses		PARAMS ((rtx));
static hashval_t const_int_htab_hash	PARAMS ((const void *));
static int const_int_htab_eq		PARAMS ((const void *,
						 const void *));
static hashval_t mem_attrs_htab_hash	PARAMS ((const void *));
static int mem_attrs_htab_eq		PARAMS ((const void *,
						 const void *));
static void mem_attrs_mark		PARAMS ((const void *));
static mem_attrs *get_mem_attrs		PARAMS ((HOST_WIDE_INT, tree, rtx,
						 rtx, unsigned int,
						 enum machine_mode));

/* Probability of the conditional branch currently being processed by
   try_split.  Set to -1 otherwise.  */
int split_branch_probability = -1;
/* Returns a hash code for X (which is really a CONST_INT).  */

static hashval_t
const_int_htab_hash (x)
     const void *x;
{
  return (hashval_t) INTVAL ((const struct rtx_def *) x);
}

/* Returns non-zero if the value represented by X (which is really a
   CONST_INT) is the same as that given by Y (which is really a
   HOST_WIDE_INT *).  */

static int
const_int_htab_eq (x, y)
     const void *x;
     const void *y;
{
  return (INTVAL ((const struct rtx_def *) x) == *((const HOST_WIDE_INT *) y));
}
/* Returns a hash code for X (which is really a mem_attrs *).  */

static hashval_t
mem_attrs_htab_hash (x)
     const void *x;
{
  mem_attrs *p = (mem_attrs *) x;

  return (p->alias ^ (p->align * 1000)
	  ^ ((p->offset ? INTVAL (p->offset) : 0) * 50000)
	  ^ ((p->size ? INTVAL (p->size) : 0) * 2500000)
	  ^ (long) p->decl);
}

/* Returns non-zero if the value represented by X (which is really a
   mem_attrs *) is the same as that given by Y (which is also really a
   mem_attrs *).  */

static int
mem_attrs_htab_eq (x, y)
     const void *x;
     const void *y;
{
  mem_attrs *p = (mem_attrs *) x;
  mem_attrs *q = (mem_attrs *) y;

  return (p->alias == q->alias && p->decl == q->decl && p->offset == q->offset
	  && p->size == q->size && p->align == q->align);
}

/* This routine is called when we determine that we need a mem_attrs entry.
   It marks the associated decl and RTL as being used, if present.  */

static void
mem_attrs_mark (x)
     const void *x;
{
  mem_attrs *p = (mem_attrs *) x;

  if (p->decl)
    ggc_mark_tree (p->decl);

  if (p->offset)
    ggc_mark_rtx (p->offset);

  if (p->size)
    ggc_mark_rtx (p->size);
}
/* Allocate a new mem_attrs structure and insert it into the hash table if
   one identical to it is not already in the table.  We are doing this for
   a MEM of mode MODE.  */

static mem_attrs *
get_mem_attrs (alias, decl, offset, size, align, mode)
     HOST_WIDE_INT alias;
     tree decl;
     rtx offset;
     rtx size;
     unsigned int align;
     enum machine_mode mode;
{
  mem_attrs attrs;
  void **slot;

  /* If everything is the default, we can just return zero.  */
  if (alias == 0 && decl == 0 && offset == 0
      && (size == 0
	  || (mode != BLKmode && GET_MODE_SIZE (mode) == INTVAL (size)))
      && (align == 1
	  || (mode != BLKmode && align == GET_MODE_ALIGNMENT (mode))))
    return 0;

  attrs.alias = alias;
  attrs.decl = decl;
  attrs.offset = offset;
  attrs.size = size;
  attrs.align = align;

  slot = htab_find_slot (mem_attrs_htab, &attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc (sizeof (mem_attrs));
      memcpy (*slot, &attrs, sizeof (mem_attrs));
    }

  return *slot;
}
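/* An illustrative sketch, not from the original file; the helper name is
   hypothetical.  Because get_mem_attrs hash-conses its results, two MEMs
   of the same mode with identical attributes share one mem_attrs record
   (or are both the null default), so attribute equality can be tested by
   pointer comparison rather than field by field.  */
#if 0
static int
sketch_same_mem_attrs_p (mem1, mem2)
     rtx mem1, mem2;
{
  return (GET_MODE (mem1) == GET_MODE (mem2)
	  && MEM_ATTRS (mem1) == MEM_ATTRS (mem2));
}
#endif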
/* Generate a new REG rtx.  Make sure ORIGINAL_REGNO is set properly, and
   don't attempt to share with the various global pieces of rtl (such as
   frame_pointer_rtx).  */

rtx
gen_raw_REG (mode, regno)
     enum machine_mode mode;
     int regno;
{
  rtx x = gen_rtx_raw_REG (mode, regno);
  ORIGINAL_REGNO (x) = regno;
  return x;
}
/* There are some RTL codes that require special attention; the generation
   functions do the raw handling.  If you add to this list, modify
   special_rtx in gengenrtl.c as well.  */

rtx
gen_rtx_CONST_INT (mode, arg)
     enum machine_mode mode ATTRIBUTE_UNUSED;
     HOST_WIDE_INT arg;
{
  void **slot;

  if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
    return const_int_rtx[arg + MAX_SAVED_CONST_INT];

#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
  if (const_true_rtx && arg == STORE_FLAG_VALUE)
    return const_true_rtx;
#endif

  /* Look up the CONST_INT in the hash table.  */
  slot = htab_find_slot_with_hash (const_int_htab, &arg,
				   (hashval_t) arg, INSERT);
  if (*slot == 0)
    *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);

  return (rtx) *slot;
}
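/* An illustrative sketch, not from the original file; the helper name is
   hypothetical.  Because every CONST_INT value maps to one shared rtx,
   whether from const_int_rtx[] or from const_int_htab, integer constants
   can be compared by pointer.  */
#if 0
static int
sketch_const_int_shared_p ()
{
  rtx a = gen_rtx_CONST_INT (VOIDmode, 42);
  rtx b = GEN_INT (42);		/* GEN_INT expands to the call above.  */

  return a == b;		/* Always nonzero.  */
}
#endif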
/* CONST_DOUBLEs need special handling because their length is known
   only at run-time.  */

rtx
gen_rtx_CONST_DOUBLE (mode, arg0, arg1)
     enum machine_mode mode;
     HOST_WIDE_INT arg0, arg1;
{
  rtx r = rtx_alloc (CONST_DOUBLE);
  int i;

  PUT_MODE (r, mode);
  X0EXP (r, 0) = NULL_RTX;
  XWINT (r, 1) = arg0;
  XWINT (r, 2) = arg1;

  for (i = GET_RTX_LENGTH (CONST_DOUBLE) - 1; i > 2; --i)
    XWINT (r, i) = 0;

  return r;
}
rtx
gen_rtx_REG (mode, regno)
     enum machine_mode mode;
     int regno;
{
  /* In case the MD file explicitly references the frame pointer, have
     all such references point to the same frame pointer.  This is
     used during frame pointer elimination to distinguish the explicit
     references to these registers from pseudos that happened to be
     assigned to them.

     If we have eliminated the frame pointer or arg pointer, we will
     be using it as a normal register, for example as a spill
     register.  In such cases, we might be accessing it in a mode that
     is not Pmode and therefore cannot use the pre-allocated rtx.

     Also don't do this when we are making new REGs in reload, since
     we don't want to get confused with the real pointers.  */

  if (mode == Pmode && !reload_in_progress)
    {
      if (regno == FRAME_POINTER_REGNUM)
	return frame_pointer_rtx;
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
      if (regno == HARD_FRAME_POINTER_REGNUM)
	return hard_frame_pointer_rtx;
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && HARD_FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
      if (regno == ARG_POINTER_REGNUM)
	return arg_pointer_rtx;
#endif
#ifdef RETURN_ADDRESS_POINTER_REGNUM
      if (regno == RETURN_ADDRESS_POINTER_REGNUM)
	return return_address_pointer_rtx;
#endif
      if (regno == STACK_POINTER_REGNUM)
	return stack_pointer_rtx;
    }

  return gen_raw_REG (mode, regno);
}
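/* An illustrative sketch, not from the original file; the helper name is
   hypothetical.  Outside of reload, a Pmode request for a fixed pointer
   register hands back the pre-allocated global, so explicit frame-pointer
   references can be recognized by pointer identity.  */
#if 0
static int
sketch_frame_pointer_shared_p ()
{
  return gen_rtx_REG (Pmode, FRAME_POINTER_REGNUM) == frame_pointer_rtx;
}
#endif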
rtx
gen_rtx_MEM (mode, addr)
     enum machine_mode mode;
     rtx addr;
{
  rtx rt = gen_rtx_raw_MEM (mode, addr);

  /* This field is not cleared by the mere allocation of the rtx, so
     we clear it here.  */
  MEM_ATTRS (rt) = 0;

  return rt;
}
rtx
gen_rtx_SUBREG (mode, reg, offset)
     enum machine_mode mode;
     rtx reg;
     int offset;
{
  /* This is the most common failure type.
     Catch it early so we can see who does it.  */
  if ((offset % GET_MODE_SIZE (mode)) != 0)
    abort ();

  /* This check isn't usable right now because combine will
     throw arbitrary crap like a CALL into a SUBREG in
     gen_lowpart_for_combine so we must just eat it.  */
#if 0
  /* Check for this too.  */
  if (offset >= GET_MODE_SIZE (GET_MODE (reg)))
    abort ();
#endif
  return gen_rtx_fmt_ei (SUBREG, mode, reg, offset);
}
/* Generate a SUBREG representing the least-significant part of REG if MODE
   is smaller than the mode of REG, otherwise a paradoxical SUBREG.  */

rtx
gen_lowpart_SUBREG (mode, reg)
     enum machine_mode mode;
     rtx reg;
{
  enum machine_mode inmode;

  inmode = GET_MODE (reg);
  if (inmode == VOIDmode)
    inmode = mode;
  return gen_rtx_SUBREG (mode, reg,
			 subreg_lowpart_offset (mode, inmode));
}
/* rtx gen_rtx (code, mode, [element1, ..., elementn])
**
**	    This routine generates an RTX of the size specified by
**	<code>, which is an RTX code.  The RTX structure is initialized
**	from the arguments <element1> through <elementn>, which are
**	interpreted according to the specific RTX type's format.  The
**	special machine mode associated with the rtx (if any) is specified
**	in <mode>.
**
**	    gen_rtx can be invoked in a way which resembles the lisp-like
**	rtx it will generate.  For example, the following rtx structure:
**
**	      (plus:QI (mem:QI (reg:SI 1))
**		       (mem:QI (plus:SI (reg:SI 2) (reg:SI 3))))
**
**	    ...would be generated by the following C code:
**
**	    gen_rtx (PLUS, QImode,
**		    gen_rtx (MEM, QImode,
**			     gen_rtx (REG, SImode, 1)),
**		    gen_rtx (MEM, QImode,
**			     gen_rtx (PLUS, SImode,
**				      gen_rtx (REG, SImode, 2),
**				      gen_rtx (REG, SImode, 3)))),
*/
/*VARARGS2*/
rtx
gen_rtx VPARAMS ((enum rtx_code code, enum machine_mode mode, ...))
{
  int i;		/* Array indices...			*/
  const char *fmt;	/* Current rtx's format...		*/
  rtx rt_val;		/* RTX to return to caller...		*/

  VA_OPEN (p, mode);
  VA_FIXEDARG (p, enum rtx_code, code);
  VA_FIXEDARG (p, enum machine_mode, mode);

  switch (code)
    {
    case CONST_INT:
      rt_val = gen_rtx_CONST_INT (mode, va_arg (p, HOST_WIDE_INT));
      break;

    case CONST_DOUBLE:
      {
	HOST_WIDE_INT arg0 = va_arg (p, HOST_WIDE_INT);
	HOST_WIDE_INT arg1 = va_arg (p, HOST_WIDE_INT);

	rt_val = gen_rtx_CONST_DOUBLE (mode, arg0, arg1);
      }
      break;

    case REG:
      rt_val = gen_rtx_REG (mode, va_arg (p, int));
      break;

    case MEM:
      rt_val = gen_rtx_MEM (mode, va_arg (p, rtx));
      break;

    default:
      rt_val = rtx_alloc (code);	/* Allocate the storage space.  */
      rt_val->mode = mode;		/* Store the machine mode...  */

      fmt = GET_RTX_FORMAT (code);	/* Find the right format...  */
      for (i = 0; i < GET_RTX_LENGTH (code); i++)
	{
	  switch (*fmt++)
	    {
	    case '0':		/* Unused field.  */
	      break;

	    case 'i':		/* An integer?  */
	      XINT (rt_val, i) = va_arg (p, int);
	      break;

	    case 'w':		/* A wide integer?  */
	      XWINT (rt_val, i) = va_arg (p, HOST_WIDE_INT);
	      break;

	    case 's':		/* A string?  */
	      XSTR (rt_val, i) = va_arg (p, char *);
	      break;

	    case 'e':		/* An expression?  */
	    case 'u':		/* An insn?  Same except when printing.  */
	      XEXP (rt_val, i) = va_arg (p, rtx);
	      break;

	    case 'E':		/* An RTX vector?  */
	      XVEC (rt_val, i) = va_arg (p, rtvec);
	      break;

	    case 'b':		/* A bitmap?  */
	      XBITMAP (rt_val, i) = va_arg (p, bitmap);
	      break;

	    case 't':		/* A tree?  */
	      XTREE (rt_val, i) = va_arg (p, tree);
	      break;

	    default:
	      abort ();
	    }
	}
      break;
    }

  VA_CLOSE (p);
  return rt_val;
}
/* gen_rtvec (n, [rt1, ..., rtn])
**
**	    This routine creates an rtvec and stores within it the
**	pointers to rtx's which are its arguments.
*/

/*VARARGS1*/
rtvec
gen_rtvec VPARAMS ((int n, ...))
{
  int i, save_n;
  rtx *vector;

  VA_OPEN (p, n);
  VA_FIXEDARG (p, int, n);

  if (n == 0)
    return NULL_RTVEC;		/* Don't allocate an empty rtvec...  */

  vector = (rtx *) alloca (n * sizeof (rtx));

  for (i = 0; i < n; i++)
    vector[i] = va_arg (p, rtx);

  /* The definition of VA_* in K&R C causes `n' to go out of scope.  */
  save_n = n;
  VA_CLOSE (p);

  return gen_rtvec_v (save_n, vector);
}
rtvec
gen_rtvec_v (n, argp)
     int n;
     rtx *argp;
{
  int i;
  rtvec rt_val;

  if (n == 0)
    return NULL_RTVEC;		/* Don't allocate an empty rtvec...  */

  rt_val = rtvec_alloc (n);	/* Allocate an rtvec...  */

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}
/* Generate a REG rtx for a new pseudo register of mode MODE.
   This pseudo is assigned the next sequential register number.  */

rtx
gen_reg_rtx (mode)
     enum machine_mode mode;
{
  struct function *f = cfun;
  rtx val;

  /* Don't let anything called after initial flow analysis create new
     registers.  */
  if (no_new_pseudos)
    abort ();

  if (generating_concat_p
      && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
    {
      /* For complex modes, don't make a single pseudo.
	 Instead, make a CONCAT of two pseudos.
	 This allows noncontiguous allocation of the real and imaginary parts,
	 which makes much better code.  Besides, allocating DCmode
	 pseudos overstrains reload on some machines like the 386.  */
      rtx realpart, imagpart;
      int size = GET_MODE_UNIT_SIZE (mode);
      enum machine_mode partmode
	= mode_for_size (size * BITS_PER_UNIT,
			 (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
			  ? MODE_FLOAT : MODE_INT),
			 0);

      realpart = gen_reg_rtx (partmode);
      imagpart = gen_reg_rtx (partmode);
      return gen_rtx_CONCAT (mode, realpart, imagpart);
    }

  /* Make sure regno_pointer_align, regno_decl, and regno_reg_rtx are large
     enough to have an element for this pseudo reg number.  */

  if (reg_rtx_no == f->emit->regno_pointer_align_length)
    {
      int old_size = f->emit->regno_pointer_align_length;
      char *new;
      rtx *new1;
      tree *new2;

      new = xrealloc (f->emit->regno_pointer_align, old_size * 2);
      memset (new + old_size, 0, old_size);
      f->emit->regno_pointer_align = (unsigned char *) new;

      new1 = (rtx *) xrealloc (f->emit->x_regno_reg_rtx,
			       old_size * 2 * sizeof (rtx));
      memset (new1 + old_size, 0, old_size * sizeof (rtx));
      regno_reg_rtx = new1;

      new2 = (tree *) xrealloc (f->emit->regno_decl,
				old_size * 2 * sizeof (tree));
      memset (new2 + old_size, 0, old_size * sizeof (tree));
      f->emit->regno_decl = new2;

      f->emit->regno_pointer_align_length = old_size * 2;
    }

  val = gen_raw_REG (mode, reg_rtx_no);
  regno_reg_rtx[reg_rtx_no++] = val;
  return val;
}
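/* An illustrative sketch, not from the original file; the helper name is
   hypothetical.  Typical use during RTL expansion: allocate a pseudo
   (only valid while no_new_pseudos is clear) and emit insns that set it;
   emit_move_insn is defined in expr.c.  */
#if 0
static rtx
sketch_load_constant ()
{
  rtx tmp = gen_reg_rtx (SImode);	/* Next sequential pseudo.  */

  emit_move_insn (tmp, GEN_INT (123));
  return tmp;
}
#endif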
/* Identify REG (which may be a CONCAT) as a user register.  */

void
mark_user_reg (reg)
     rtx reg;
{
  if (GET_CODE (reg) == CONCAT)
    {
      REG_USERVAR_P (XEXP (reg, 0)) = 1;
      REG_USERVAR_P (XEXP (reg, 1)) = 1;
    }
  else if (GET_CODE (reg) == REG)
    REG_USERVAR_P (reg) = 1;
  else
    abort ();
}
/* Identify REG as a probable pointer register and show its alignment
   as ALIGN, if nonzero.  */

void
mark_reg_pointer (reg, align)
     rtx reg;
     int align;
{
  if (! REG_POINTER (reg))
    {
      REG_POINTER (reg) = 1;

      if (align)
	REGNO_POINTER_ALIGN (REGNO (reg)) = align;
    }
  else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
    /* We can no longer be sure just how aligned this pointer is.  */
    REGNO_POINTER_ALIGN (REGNO (reg)) = align;
}
/* Return 1 plus largest pseudo reg number used in the current function.  */

int
max_reg_num ()
{
  return reg_rtx_no;
}

/* Return 1 + the largest label number used so far in the current function.  */

int
max_label_num ()
{
  if (last_label_num && label_num == base_label_num)
    return last_label_num;
  return label_num;
}

/* Return first label number used in this function (if any were used).  */

int
get_first_label_num ()
{
  return first_label_num;
}
/* Return the final regno of X, which is a SUBREG of a hard
   register.  */

unsigned int
subreg_hard_regno (x, check_mode)
     rtx x;
     int check_mode;
{
  enum machine_mode mode = GET_MODE (x);
  unsigned int byte_offset, base_regno, final_regno;
  rtx reg = SUBREG_REG (x);

  /* This is where we attempt to catch illegal subregs
     created by the compiler.  */
  if (GET_CODE (x) != SUBREG
      || GET_CODE (reg) != REG)
    abort ();
  base_regno = REGNO (reg);
  if (base_regno >= FIRST_PSEUDO_REGISTER)
    abort ();
  if (check_mode && ! HARD_REGNO_MODE_OK (base_regno, GET_MODE (reg)))
    abort ();

  /* Catch non-congruent offsets too.  */
  byte_offset = SUBREG_BYTE (x);
  if ((byte_offset % GET_MODE_SIZE (mode)) != 0)
    abort ();

  final_regno = subreg_regno (x);

  return final_regno;
}
/* Return a value representing some low-order bits of X, where the number
   of low-order bits is given by MODE.  Note that no conversion is done
   between floating-point and fixed-point values, rather, the bit
   representation is returned.

   This function handles the cases in common between gen_lowpart, below,
   and two variants in cse.c and combine.c.  These are the cases that can
   be safely handled at all points in the compilation.

   If this is not a case we can handle, return 0.  */

rtx
gen_lowpart_common (mode, x)
     enum machine_mode mode;
     rtx x;
{
  int msize = GET_MODE_SIZE (mode);
  int xsize = GET_MODE_SIZE (GET_MODE (x));
  int offset = 0;

  if (GET_MODE (x) == mode)
    return x;

  /* MODE must occupy no more words than the mode of X.  */
  if (GET_MODE (x) != VOIDmode
      && ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
	  > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
    return 0;

  offset = subreg_lowpart_offset (mode, GET_MODE (x));

  if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
      && (GET_MODE_CLASS (mode) == MODE_INT
	  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
    {
      /* If we are getting the low-order part of something that has been
	 sign- or zero-extended, we can either just use the object being
	 extended or make a narrower extension.  If we want an even smaller
	 piece than the size of the object being extended, call ourselves
	 recursively.

	 This case is used mostly by combine and cse.  */

      if (GET_MODE (XEXP (x, 0)) == mode)
	return XEXP (x, 0);
      else if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
	return gen_lowpart_common (mode, XEXP (x, 0));
      else if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (x)))
	return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
    }
  else if (GET_CODE (x) == SUBREG || GET_CODE (x) == REG
	   || GET_CODE (x) == CONCAT)
    return simplify_gen_subreg (mode, x, GET_MODE (x), offset);
  /* If X is a CONST_INT or a CONST_DOUBLE, extract the appropriate bits
     from the low-order part of the constant.  */
  else if ((GET_MODE_CLASS (mode) == MODE_INT
	    || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
	   && GET_MODE (x) == VOIDmode
	   && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
    {
      /* If MODE is twice the host word size, X is already the desired
	 representation.  Otherwise, if MODE is wider than a word, we can't
	 do this.  If MODE is exactly a word, return just one CONST_INT.  */

      if (GET_MODE_BITSIZE (mode) >= 2 * HOST_BITS_PER_WIDE_INT)
	return x;
      else if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
	return 0;
      else if (GET_MODE_BITSIZE (mode) == HOST_BITS_PER_WIDE_INT)
	return (GET_CODE (x) == CONST_INT ? x
		: GEN_INT (CONST_DOUBLE_LOW (x)));
      else
	{
	  /* MODE must be narrower than HOST_BITS_PER_WIDE_INT.  */
	  HOST_WIDE_INT val = (GET_CODE (x) == CONST_INT ? INTVAL (x)
			       : CONST_DOUBLE_LOW (x));

	  /* Sign extend to HOST_WIDE_INT.  */
	  val = trunc_int_for_mode (val, mode);

	  return (GET_CODE (x) == CONST_INT && INTVAL (x) == val ? x
		  : GEN_INT (val));
	}
    }
#ifndef REAL_ARITHMETIC
  /* If X is an integral constant but we want it in floating-point, it
     must be the case that we have a union of an integer and a floating-point
     value.  If the machine-parameters allow it, simulate that union here
     and return the result.  The two-word and single-word cases are
     different.  */

  else if (((HOST_FLOAT_FORMAT == TARGET_FLOAT_FORMAT
	     && HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
	    || flag_pretend_float)
	   && GET_MODE_CLASS (mode) == MODE_FLOAT
	   && GET_MODE_SIZE (mode) == UNITS_PER_WORD
	   && GET_CODE (x) == CONST_INT
	   && sizeof (float) * HOST_BITS_PER_CHAR == HOST_BITS_PER_WIDE_INT)
    {
      union {HOST_WIDE_INT i; float d; } u;

      u.i = INTVAL (x);
      return CONST_DOUBLE_FROM_REAL_VALUE (u.d, mode);
    }
  else if (((HOST_FLOAT_FORMAT == TARGET_FLOAT_FORMAT
	     && HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
	    || flag_pretend_float)
	   && GET_MODE_CLASS (mode) == MODE_FLOAT
	   && GET_MODE_SIZE (mode) == 2 * UNITS_PER_WORD
	   && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE)
	   && GET_MODE (x) == VOIDmode
	   && (sizeof (double) * HOST_BITS_PER_CHAR
	       == 2 * HOST_BITS_PER_WIDE_INT))
    {
      union {HOST_WIDE_INT i[2]; double d; } u;
      HOST_WIDE_INT low, high;

      if (GET_CODE (x) == CONST_INT)
	low = INTVAL (x), high = low >> (HOST_BITS_PER_WIDE_INT - 1);
      else
	low = CONST_DOUBLE_LOW (x), high = CONST_DOUBLE_HIGH (x);

#ifdef HOST_WORDS_BIG_ENDIAN
      u.i[0] = high, u.i[1] = low;
#else
      u.i[0] = low, u.i[1] = high;
#endif

      return CONST_DOUBLE_FROM_REAL_VALUE (u.d, mode);
    }

  /* Similarly, if this is converting a floating-point value into a
     single-word integer.  Only do this if the host and target parameters
     are compatible.  */

  else if (((HOST_FLOAT_FORMAT == TARGET_FLOAT_FORMAT
	     && HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
	    || flag_pretend_float)
	   && (GET_MODE_CLASS (mode) == MODE_INT
	       || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
	   && GET_CODE (x) == CONST_DOUBLE
	   && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
	   && GET_MODE_BITSIZE (mode) == BITS_PER_WORD)
    return constant_subword (x, (offset / UNITS_PER_WORD), GET_MODE (x));

  /* Similarly, if this is converting a floating-point value into a
     two-word integer, we can do this one word at a time and make an
     integer.  Only do this if the host and target parameters are
     compatible.  */

  else if (((HOST_FLOAT_FORMAT == TARGET_FLOAT_FORMAT
	     && HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
	    || flag_pretend_float)
	   && (GET_MODE_CLASS (mode) == MODE_INT
	       || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
	   && GET_CODE (x) == CONST_DOUBLE
	   && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
	   && GET_MODE_BITSIZE (mode) == 2 * BITS_PER_WORD)
    {
      rtx lowpart, highpart;

      lowpart = constant_subword (x,
				  (offset / UNITS_PER_WORD) + WORDS_BIG_ENDIAN,
				  GET_MODE (x));
      highpart = constant_subword (x,
				   (offset / UNITS_PER_WORD) + (! WORDS_BIG_ENDIAN),
				   GET_MODE (x));
      if (lowpart && GET_CODE (lowpart) == CONST_INT
	  && highpart && GET_CODE (highpart) == CONST_INT)
	return immed_double_const (INTVAL (lowpart), INTVAL (highpart), mode);
    }
#else /* ifndef REAL_ARITHMETIC */

  /* When we have a FP emulator, we can handle all conversions between
     FP and integer operands.  This simplifies reload because it
     doesn't have to deal with constructs like (subreg:DI
     (const_double:SF ...)) or (subreg:DF (const_int ...)).  */
  /* Single-precision floats are always 32-bits and double-precision
     floats are always 64-bits.  */

  else if (GET_MODE_CLASS (mode) == MODE_FLOAT
	   && GET_MODE_BITSIZE (mode) == 32
	   && GET_CODE (x) == CONST_INT)
    {
      REAL_VALUE_TYPE r;
      HOST_WIDE_INT i;

      i = INTVAL (x);
      r = REAL_VALUE_FROM_TARGET_SINGLE (i);
      return CONST_DOUBLE_FROM_REAL_VALUE (r, mode);
    }
  else if (GET_MODE_CLASS (mode) == MODE_FLOAT
	   && GET_MODE_BITSIZE (mode) == 64
	   && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE)
	   && GET_MODE (x) == VOIDmode)
    {
      REAL_VALUE_TYPE r;
      HOST_WIDE_INT i[2];
      HOST_WIDE_INT low, high;

      if (GET_CODE (x) == CONST_INT)
	{
	  low = INTVAL (x);
	  high = low >> (HOST_BITS_PER_WIDE_INT - 1);
	}
      else
	{
	  low = CONST_DOUBLE_LOW (x);
	  high = CONST_DOUBLE_HIGH (x);
	}

      /* REAL_VALUE_TARGET_DOUBLE takes the addressing order of the
	 target machine.  */
      if (WORDS_BIG_ENDIAN)
	i[0] = high, i[1] = low;
      else
	i[0] = low, i[1] = high;

      r = REAL_VALUE_FROM_TARGET_DOUBLE (i);
      return CONST_DOUBLE_FROM_REAL_VALUE (r, mode);
    }
  else if ((GET_MODE_CLASS (mode) == MODE_INT
	    || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
	   && GET_CODE (x) == CONST_DOUBLE
	   && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
    {
      REAL_VALUE_TYPE r;
      long i[4];  /* Only the low 32 bits of each 'long' are used.  */
      int endian = WORDS_BIG_ENDIAN ? 1 : 0;

      REAL_VALUE_FROM_CONST_DOUBLE (r, x);
      switch (GET_MODE_BITSIZE (GET_MODE (x)))
	{
	case 32:
	  REAL_VALUE_TO_TARGET_SINGLE (r, i[endian]);
	  i[1 - endian] = 0;
	  break;
	case 64:
	  REAL_VALUE_TO_TARGET_DOUBLE (r, i);
	  break;
	case 96:
	  REAL_VALUE_TO_TARGET_LONG_DOUBLE (r, i + endian);
	  i[3 - 3 * endian] = 0;
	  break;
	case 128:
	  REAL_VALUE_TO_TARGET_LONG_DOUBLE (r, i);
	  break;
	default:
	  abort ();
	}

      /* Now, pack the 32-bit elements of the array into a CONST_DOUBLE
	 and return it.  */
#if HOST_BITS_PER_WIDE_INT == 32
      return immed_double_const (i[endian], i[1 - endian], mode);
#else
      {
	int c;

	if (HOST_BITS_PER_WIDE_INT != 64)
	  abort ();

	for (c = 0; c < 4; c++)
	  i[c] &= ~ (0L);

	switch (GET_MODE_BITSIZE (GET_MODE (x)))
	  {
	  case 32:
	  case 64:
	    return immed_double_const (((unsigned long) i[endian]) |
				       (((HOST_WIDE_INT) i[1 - endian]) << 32),
				       0, mode);
	  case 96:
	  case 128:
	    return immed_double_const (((unsigned long) i[endian * 3]) |
				       (((HOST_WIDE_INT) i[1 + endian]) << 32),
				       ((unsigned long) i[2 - endian]) |
				       (((HOST_WIDE_INT) i[3 - endian * 3]) << 32),
				       mode);
	  default:
	    abort ();
	  }
      }
#endif
    }
#endif /* ifndef REAL_ARITHMETIC */

  /* Otherwise, we can't do this.  */
  return 0;
}
/* Return the real part (which has mode MODE) of a complex value X.
   This always comes at the low address in memory.  */

rtx
gen_realpart (mode, x)
     enum machine_mode mode;
     rtx x;
{
  if (WORDS_BIG_ENDIAN
      && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
      && REG_P (x)
      && REGNO (x) < FIRST_PSEUDO_REGISTER)
    internal_error
      ("can't access real part of complex value in hard register");
  else if (WORDS_BIG_ENDIAN)
    return gen_highpart (mode, x);
  else
    return gen_lowpart (mode, x);
}

/* Return the imaginary part (which has mode MODE) of a complex value X.
   This always comes at the high address in memory.  */

rtx
gen_imagpart (mode, x)
     enum machine_mode mode;
     rtx x;
{
  if (WORDS_BIG_ENDIAN)
    return gen_lowpart (mode, x);
  else if (! WORDS_BIG_ENDIAN
	   && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
	   && REG_P (x)
	   && REGNO (x) < FIRST_PSEUDO_REGISTER)
    internal_error
      ("can't access imaginary part of complex value in hard register");
  else
    return gen_highpart (mode, x);
}
/* Return 1 iff X, assumed to be a SUBREG,
   refers to the real part of the complex value in its containing reg.
   Complex values are always stored with the real part in the first word,
   regardless of WORDS_BIG_ENDIAN.  */

int
subreg_realpart_p (x)
     rtx x;
{
  if (GET_CODE (x) != SUBREG)
    abort ();

  return ((unsigned int) SUBREG_BYTE (x)
	  < GET_MODE_UNIT_SIZE (GET_MODE (SUBREG_REG (x))));
}
/* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a value,
   return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
   least-significant part of X.
   MODE specifies how big a part of X to return;
   it usually should not be larger than a word.
   If X is a MEM whose address is a QUEUED, the value may be so also.  */

rtx
gen_lowpart (mode, x)
     enum machine_mode mode;
     rtx x;
{
  rtx result = gen_lowpart_common (mode, x);

  if (result)
    return result;
  else if (GET_CODE (x) == REG)
    {
      /* Must be a hard reg that's not valid in MODE.  */
      result = gen_lowpart_common (mode, copy_to_reg (x));
      if (result == 0)
	abort ();
      return result;
    }
  else if (GET_CODE (x) == MEM)
    {
      /* The only additional case we can do is MEM.  */
      int offset = 0;
      if (WORDS_BIG_ENDIAN)
	offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
		  - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));

      if (BYTES_BIG_ENDIAN)
	/* Adjust the address so that the address-after-the-data
	   is unchanged.  */
	offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
		   - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));

      return adjust_address (x, mode, offset);
    }
  else if (GET_CODE (x) == ADDRESSOF)
    return gen_lowpart (mode, force_reg (GET_MODE (x), x));
  else
    abort ();
}
/* Like `gen_lowpart', but refer to the most significant part.
   This is used to access the imaginary part of a complex number.  */

rtx
gen_highpart (mode, x)
     enum machine_mode mode;
     rtx x;
{
  unsigned int msize = GET_MODE_SIZE (mode);
  rtx result;

  /* This case loses if X is a subreg.  To catch bugs early,
     complain if an invalid MODE is used even in other cases.  */
  if (msize > UNITS_PER_WORD
      && msize != GET_MODE_UNIT_SIZE (GET_MODE (x)))
    abort ();

  result = simplify_gen_subreg (mode, x, GET_MODE (x),
				subreg_highpart_offset (mode, GET_MODE (x)));

  /* simplify_gen_subreg is not guaranteed to return a valid operand for
     the target if we have a MEM.  gen_highpart must return a valid operand,
     emitting code if necessary to do so.  */
  if (GET_CODE (result) == MEM)
    result = validize_mem (result);

  if (!result)
    abort ();
  return result;
}
/* Like gen_highpart, but accept the mode of the EXP operand in case EXP
   can be a VOIDmode constant.  */
rtx
gen_highpart_mode (outermode, innermode, exp)
     enum machine_mode outermode, innermode;
     rtx exp;
{
  if (GET_MODE (exp) != VOIDmode)
    {
      if (GET_MODE (exp) != innermode)
	abort ();
      return gen_highpart (outermode, exp);
    }
  return simplify_gen_subreg (outermode, exp, innermode,
			      subreg_highpart_offset (outermode, innermode));
}
/* Return offset in bytes to get OUTERMODE low part
   of the value in mode INNERMODE stored in memory in target format.  */

unsigned int
subreg_lowpart_offset (outermode, innermode)
     enum machine_mode outermode, innermode;
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  return offset;
}

/* Return offset in bytes to get OUTERMODE high part
   of the value in mode INNERMODE stored in memory in target format.  */
unsigned int
subreg_highpart_offset (outermode, innermode)
     enum machine_mode outermode, innermode;
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  if (GET_MODE_SIZE (innermode) < GET_MODE_SIZE (outermode))
    abort ();

  if (difference > 0)
    {
      if (! WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (! BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  return offset;
}
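/* Worked example (not from the original file), assuming 4-byte words
   (UNITS_PER_WORD == 4): for OUTERMODE == SImode inside INNERMODE ==
   DImode, difference is 8 - 4 = 4.  On a fully little-endian target
   neither adjustment applies, so the low part is at byte offset 0 and
   the high part at byte offset 4.  On a fully big-endian target the
   low part is at byte offset 4 and the high part at byte offset 0.  */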
/* Return 1 iff X, assumed to be a SUBREG,
   refers to the least significant part of its containing reg.
   If X is not a SUBREG, always return 1 (it is its own low part!).  */

int
subreg_lowpart_p (x)
     rtx x;
{
  if (GET_CODE (x) != SUBREG)
    return 1;
  else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
    return 0;

  return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
	  == SUBREG_BYTE (x));
}
/* Helper routine for all the constant cases of operand_subword.
   Some places invoke this directly.  */

rtx
constant_subword (op, offset, mode)
     rtx op;
     int offset;
     enum machine_mode mode;
{
  int size_ratio = HOST_BITS_PER_WIDE_INT / BITS_PER_WORD;
  HOST_WIDE_INT val;

  /* If OP is already an integer word, return it.  */
  if (GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_SIZE (mode) == UNITS_PER_WORD)
    return op;

#ifdef REAL_ARITHMETIC
  /* The output is some bits, the width of the target machine's word.
     A wider-word host can surely hold them in a CONST_INT.  A narrower-word
     host can't.  */
  if (HOST_BITS_PER_WIDE_INT >= BITS_PER_WORD
      && GET_MODE_CLASS (mode) == MODE_FLOAT
      && GET_MODE_BITSIZE (mode) == 64
      && GET_CODE (op) == CONST_DOUBLE)
    {
      long k[2];
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_DOUBLE (rv, k);

      /* We handle 32-bit and >= 64-bit words here.  Note that the order in
	 which the words are written depends on the word endianness.
	 ??? This is a potential portability problem and should
	 be fixed at some point.

	 We must exercise caution with the sign bit.  By definition there
	 are 32 significant bits in K; there may be more in a HOST_WIDE_INT.
	 Consider a host with a 32-bit long and a 64-bit HOST_WIDE_INT.
	 So we explicitly mask and sign-extend as necessary.  */
      if (BITS_PER_WORD == 32)
	{
	  val = k[offset];
	  val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;
	  return GEN_INT (val);
	}
#if HOST_BITS_PER_WIDE_INT >= 64
      else if (BITS_PER_WORD >= 64 && offset == 0)
	{
	  val = k[! WORDS_BIG_ENDIAN];
	  val = (((val & 0xffffffff) ^ 0x80000000) - 0x80000000) << 32;
	  val |= (HOST_WIDE_INT) k[WORDS_BIG_ENDIAN] & 0xffffffff;
	  return GEN_INT (val);
	}
#endif
      else if (BITS_PER_WORD == 16)
	{
	  val = k[offset >> 1];
	  if ((offset & 1) == ! WORDS_BIG_ENDIAN)
	    val >>= 16;
	  val = ((val & 0xffff) ^ 0x8000) - 0x8000;
	  return GEN_INT (val);
	}
      else
	abort ();
    }
  else if (HOST_BITS_PER_WIDE_INT >= BITS_PER_WORD
	   && GET_MODE_CLASS (mode) == MODE_FLOAT
	   && GET_MODE_BITSIZE (mode) > 64
	   && GET_CODE (op) == CONST_DOUBLE)
    {
      long k[4];
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);

      if (BITS_PER_WORD == 32)
	{
	  val = k[offset];
	  val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;
	  return GEN_INT (val);
	}
#if HOST_BITS_PER_WIDE_INT >= 64
      else if (BITS_PER_WORD >= 64 && offset <= 1)
	{
	  val = k[offset * 2 + ! WORDS_BIG_ENDIAN];
	  val = (((val & 0xffffffff) ^ 0x80000000) - 0x80000000) << 32;
	  val |= (HOST_WIDE_INT) k[offset * 2 + WORDS_BIG_ENDIAN] & 0xffffffff;
	  return GEN_INT (val);
	}
#endif
      else
	abort ();
    }
#else /* no REAL_ARITHMETIC */
  if (((HOST_FLOAT_FORMAT == TARGET_FLOAT_FORMAT
	&& HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
       || flag_pretend_float)
      && GET_MODE_CLASS (mode) == MODE_FLOAT
      && GET_MODE_SIZE (mode) == 2 * UNITS_PER_WORD
      && GET_CODE (op) == CONST_DOUBLE)
    {
      /* The constant is stored in the host's word-ordering,
	 but we want to access it in the target's word-ordering.  Some
	 compilers don't like a conditional inside macro args, so we have two
	 copies of the return.  */
#ifdef HOST_WORDS_BIG_ENDIAN
      return GEN_INT (offset == WORDS_BIG_ENDIAN
		      ? CONST_DOUBLE_HIGH (op) : CONST_DOUBLE_LOW (op));
#else
      return GEN_INT (offset != WORDS_BIG_ENDIAN
		      ? CONST_DOUBLE_HIGH (op) : CONST_DOUBLE_LOW (op));
#endif
    }
#endif /* no REAL_ARITHMETIC */

  /* Single word float is a little harder, since single- and double-word
     values often do not have the same high-order bits.  We have already
     verified that we want the only defined word of the single-word value.  */
#ifdef REAL_ARITHMETIC
  if (GET_MODE_CLASS (mode) == MODE_FLOAT
      && GET_MODE_BITSIZE (mode) == 32
      && GET_CODE (op) == CONST_DOUBLE)
    {
      long l;
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_SINGLE (rv, l);

      /* Sign extend from known 32-bit value to HOST_WIDE_INT.  */
      val = l;
      val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;

      if (BITS_PER_WORD == 16)
	{
	  if ((offset & 1) == ! WORDS_BIG_ENDIAN)
	    val >>= 16;
	  val = ((val & 0xffff) ^ 0x8000) - 0x8000;
	}

      return GEN_INT (val);
    }
#else
  if (((HOST_FLOAT_FORMAT == TARGET_FLOAT_FORMAT
	&& HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
       || flag_pretend_float)
      && sizeof (float) * 8 == HOST_BITS_PER_WIDE_INT
      && GET_MODE_CLASS (mode) == MODE_FLOAT
      && GET_MODE_SIZE (mode) == UNITS_PER_WORD
      && GET_CODE (op) == CONST_DOUBLE)
    {
      double d;
      union {float f; HOST_WIDE_INT i; } u;

      REAL_VALUE_FROM_CONST_DOUBLE (d, op);

      u.f = d;
      return GEN_INT (u.i);
    }
  if (((HOST_FLOAT_FORMAT == TARGET_FLOAT_FORMAT
	&& HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
       || flag_pretend_float)
      && sizeof (double) * 8 == HOST_BITS_PER_WIDE_INT
      && GET_MODE_CLASS (mode) == MODE_FLOAT
      && GET_MODE_SIZE (mode) == UNITS_PER_WORD
      && GET_CODE (op) == CONST_DOUBLE)
    {
      double d;
      union {double d; HOST_WIDE_INT i; } u;

      REAL_VALUE_FROM_CONST_DOUBLE (d, op);

      u.d = d;
      return GEN_INT (u.i);
    }
#endif /* no REAL_ARITHMETIC */

  /* The only remaining cases that we can handle are integers.
     Convert to proper endianness now since these cases need it.
     At this point, offset == 0 means the low-order word.

     We do not want to handle the case when BITS_PER_WORD <= HOST_BITS_PER_INT
     in general.  However, if OP is (const_int 0), we can just return
     it for any word.  */

  if (op == const0_rtx)
    return op;

  if (GET_MODE_CLASS (mode) != MODE_INT
      || (GET_CODE (op) != CONST_INT && GET_CODE (op) != CONST_DOUBLE)
      || BITS_PER_WORD > HOST_BITS_PER_WIDE_INT)
    return 0;

  if (WORDS_BIG_ENDIAN)
    offset = GET_MODE_SIZE (mode) / UNITS_PER_WORD - 1 - offset;

  /* Find out which word on the host machine this value is in and get
     it from the constant.  */
  val = (offset / size_ratio == 0
	 ? (GET_CODE (op) == CONST_INT ? INTVAL (op) : CONST_DOUBLE_LOW (op))
	 : (GET_CODE (op) == CONST_INT
	    ? (INTVAL (op) < 0 ? ~0 : 0) : CONST_DOUBLE_HIGH (op)));

  /* Get the value we want into the low bits of val.  */
  if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT)
    val = ((val >> ((offset % size_ratio) * BITS_PER_WORD)));

  val = trunc_int_for_mode (val, word_mode);

  return GEN_INT (val);
}
/* Return subword OFFSET of operand OP.
   The word number, OFFSET, is interpreted as the word number starting
   at the low-order address.  OFFSET 0 is the low-order word if not
   WORDS_BIG_ENDIAN, otherwise it is the high-order word.

   If we cannot extract the required word, we return zero.  Otherwise,
   an rtx corresponding to the requested word will be returned.

   VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
   reload has completed, a valid address will always be returned.  After
   reload, if a valid address cannot be returned, we return zero.

   If VALIDATE_ADDRESS is zero, we simply form the required address; validating
   it is the responsibility of the caller.

   MODE is the mode of OP in case it is a CONST_INT.

   ??? This is still rather broken for some cases.  The problem for the
   moment is that all callers of this thing provide no 'goal mode' to
   tell us to work with.  This exists because all callers were written
   in a word based SUBREG world.
   Now use of this function can be deprecated by simplify_subreg in most
   cases.  */

rtx
operand_subword (op, offset, validate_address, mode)
     rtx op;
     unsigned int offset;
     int validate_address;
     enum machine_mode mode;
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  if (mode == VOIDmode)
    abort ();

  /* If OP is narrower than a word, fail.  */
  if (mode != BLKmode
      && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
    return 0;

  /* If we want a word outside OP, return zero.  */
  if (mode != BLKmode
      && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
    return const0_rtx;

  /* Form a new MEM at the requested address.  */
  if (GET_CODE (op) == MEM)
    {
      rtx new = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);

      if (! validate_address)
	return new;

      else if (reload_completed)
	{
	  if (! strict_memory_address_p (word_mode, XEXP (new, 0)))
	    return 0;
	}
      else
	return replace_equiv_address (new, XEXP (new, 0));
    }

  /* Rest can be handled by simplify_subreg.  */
  return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
}
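/* An illustrative sketch, not from the original file; the helper name is
   hypothetical.  Splitting a DImode operand into its two word-sized
   pieces on a 32-bit target: subword 0 is always at the low address,
   which is the most significant word when WORDS_BIG_ENDIAN.  */
#if 0
static void
sketch_split_di (op, pw0, pw1)
     rtx op;
     rtx *pw0, *pw1;
{
  /* Either call may return 0 if a valid address cannot be formed.  */
  *pw0 = operand_subword (op, 0, 1, DImode);
  *pw1 = operand_subword (op, 1, 1, DImode);
}
#endif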
/* Similar to `operand_subword', but never return 0.  If we can't extract
   the required subword, put OP into a register and try again.  If that fails,
   abort.  We always validate the address in this case.

   MODE is the mode of OP, in case it is CONST_INT.  */

rtx
operand_subword_force (op, offset, mode)
     rtx op;
     unsigned int offset;
     enum machine_mode mode;
{
  rtx result = operand_subword (op, offset, 1, mode);

  if (result)
    return result;

  if (mode != BLKmode && mode != VOIDmode)
    {
      /* If this is a register which can not be accessed by words, copy it
	 to a pseudo register.  */
      if (GET_CODE (op) == REG)
	op = copy_to_reg (op);
      else
	op = force_reg (mode, op);
    }

  result = operand_subword (op, offset, 1, mode);
  if (result == 0)
    abort ();

  return result;
}
/* Given a compare instruction, swap the operands.
   A test instruction is changed into a compare of 0 against the operand.  */

void
reverse_comparison (insn)
     rtx insn;
{
  rtx body = PATTERN (insn);
  rtx comp;

  if (GET_CODE (body) == SET)
    comp = SET_SRC (body);
  else
    comp = SET_SRC (XVECEXP (body, 0, 0));

  if (GET_CODE (comp) == COMPARE)
    {
      rtx op0 = XEXP (comp, 0);
      rtx op1 = XEXP (comp, 1);
      XEXP (comp, 0) = op1;
      XEXP (comp, 1) = op0;
    }
  else
    {
      rtx new = gen_rtx_COMPARE (VOIDmode,
				 CONST0_RTX (GET_MODE (comp)), comp);
      if (GET_CODE (body) == SET)
	SET_SRC (body) = new;
      else
	SET_SRC (XVECEXP (body, 0, 0)) = new;
    }
}
/* Given REF, a MEM, and T, either the type of REF or the expression
   corresponding to REF, set the memory attributes.  OBJECTP is nonzero
   if we are making a new object of this type.  */

void
set_mem_attributes (ref, t, objectp)
     rtx ref;
     tree t;
     int objectp;
{
  HOST_WIDE_INT alias = MEM_ALIAS_SET (ref);
  tree decl = MEM_DECL (ref);
  rtx offset = MEM_OFFSET (ref);
  rtx size = MEM_SIZE (ref);
  unsigned int align = MEM_ALIGN (ref);
  tree type;

  /* It can happen that type_for_mode was given a mode for which there
     is no language-level type.  In which case it returns NULL, which
     we can see here.  */
  if (t == NULL_TREE)
    return;

  type = TYPE_P (t) ? t : TREE_TYPE (t);

  /* If we have already set DECL_RTL = ref, get_alias_set will get the
     wrong answer, as it assumes that DECL_RTL already has the right alias
     info.  Callers should not set DECL_RTL until after the call to
     set_mem_attributes.  */
  if (DECL_P (t) && ref == DECL_RTL_IF_SET (t))
    abort ();

  /* Get the alias set from the expression or type (perhaps using a
     front-end routine) and use it.  */
  alias = get_alias_set (t);

  MEM_VOLATILE_P (ref) = TYPE_VOLATILE (type);
  MEM_IN_STRUCT_P (ref) = AGGREGATE_TYPE_P (type);
  RTX_UNCHANGING_P (ref)
    |= ((lang_hooks.honor_readonly
	 && (TYPE_READONLY (type) || TREE_READONLY (t)))
	|| (! TYPE_P (t) && TREE_CONSTANT (t)));

  /* If we are making an object of this type, or if this is a DECL, we know
     that it is a scalar if the type is not an aggregate.  */
  if ((objectp || DECL_P (t)) && ! AGGREGATE_TYPE_P (type))
    MEM_SCALAR_P (ref) = 1;

  /* If the size is known, we can set that.  */
  if (TYPE_SIZE_UNIT (type) && host_integerp (TYPE_SIZE_UNIT (type), 1))
    size = GEN_INT (tree_low_cst (TYPE_SIZE_UNIT (type), 1));

  /* If T is not a type, we may be able to deduce some more information about
     the expression.  */
  if (! TYPE_P (t))
    {
      maybe_set_unchanging (ref, t);
      if (TREE_THIS_VOLATILE (t))
	MEM_VOLATILE_P (ref) = 1;

      /* Now remove any NOPs: they don't change what the underlying object is.
	 Likewise for SAVE_EXPR.  */
      while (TREE_CODE (t) == NOP_EXPR || TREE_CODE (t) == CONVERT_EXPR
	     || TREE_CODE (t) == NON_LVALUE_EXPR || TREE_CODE (t) == SAVE_EXPR)
	t = TREE_OPERAND (t, 0);

      /* If this expression can't be addressed (e.g., it contains a reference
	 to a non-addressable field), show we don't change its alias set.  */
      if (! can_address_p (t))
	MEM_KEEP_ALIAS_SET_P (ref) = 1;

      /* If this is a decl, set the attributes of the MEM from it.  */
      if (DECL_P (t))
	{
	  decl = t;
	  offset = GEN_INT (0);
	  size = (DECL_SIZE_UNIT (t)
		  && host_integerp (DECL_SIZE_UNIT (t), 1)
		  ? GEN_INT (tree_low_cst (DECL_SIZE_UNIT (t), 1)) : 0);
	  align = DECL_ALIGN (t);
	}

      /* If this is an INDIRECT_REF, we know its alignment.  */
      else if (TREE_CODE (t) == INDIRECT_REF)
	align = TYPE_ALIGN (type);
    }

  /* Now set the attributes we computed above.  */
  MEM_ATTRS (ref)
    = get_mem_attrs (alias, decl, offset, size, align, GET_MODE (ref));

  /* If this is already known to be a scalar or aggregate, we are done.  */
  if (MEM_IN_STRUCT_P (ref) || MEM_SCALAR_P (ref))
    return;

  /* If it is a reference into an aggregate, this is part of an aggregate.
     Otherwise we don't know.  */
  else if (TREE_CODE (t) == COMPONENT_REF || TREE_CODE (t) == ARRAY_REF
	   || TREE_CODE (t) == ARRAY_RANGE_REF
	   || TREE_CODE (t) == BIT_FIELD_REF)
    MEM_IN_STRUCT_P (ref) = 1;
}
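/* An illustrative sketch, not from the original file; the helper name is
   hypothetical.  The usual calling pattern when expanding a declaration
   is to build the MEM and attach its attributes before DECL_RTL is set,
   per the abort above.  */
#if 0
static void
sketch_attach_attributes (decl, addr)
     tree decl;
     rtx addr;
{
  rtx mem = gen_rtx_MEM (DECL_MODE (decl), addr);

  set_mem_attributes (mem, decl, 1);	/* A new object of DECL's type.  */
  SET_DECL_RTL (decl, mem);		/* Only after set_mem_attributes.  */
}
#endif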
/* Set the alias set of MEM to SET.  */

void
set_mem_alias_set (mem, set)
     rtx mem;
     HOST_WIDE_INT set;
{
#ifdef ENABLE_CHECKING
  /* If the new and old alias sets don't conflict, something is wrong.  */
  if (!alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)))
    abort ();
#endif

  MEM_ATTRS (mem) = get_mem_attrs (set, MEM_DECL (mem), MEM_OFFSET (mem),
				   MEM_SIZE (mem), MEM_ALIGN (mem),
				   GET_MODE (mem));
}

/* Set the alignment of MEM to ALIGN bits.  */

void
set_mem_align (mem, align)
     rtx mem;
     unsigned int align;
{
  MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_DECL (mem),
				   MEM_OFFSET (mem), MEM_SIZE (mem), align,
				   GET_MODE (mem));
}

/* Set the decl for MEM to DECL.  */

void
set_mem_decl (mem, decl)
     rtx mem;
     tree decl;
{
  MEM_ATTRS (mem)
    = get_mem_attrs (MEM_ALIAS_SET (mem), decl, MEM_OFFSET (mem),
		     MEM_SIZE (mem), MEM_ALIGN (mem), GET_MODE (mem));
}
/* Return a memory reference like MEMREF, but with its mode changed to MODE
   and its address changed to ADDR.  (VOIDmode means don't change the mode.
   NULL for ADDR means don't change the address.)  VALIDATE is nonzero if the
   returned memory location is required to be valid.  The memory
   attributes are not changed.  */

static rtx
change_address_1 (memref, mode, addr, validate)
     rtx memref;
     enum machine_mode mode;
     rtx addr;
     int validate;
{
  rtx new;

  if (GET_CODE (memref) != MEM)
    abort ();
  if (mode == VOIDmode)
    mode = GET_MODE (memref);
  if (addr == 0)
    addr = XEXP (memref, 0);

  if (validate)
    {
      if (reload_in_progress || reload_completed)
	{
	  if (! memory_address_p (mode, addr))
	    abort ();
	}
      else
	addr = memory_address (mode, addr);
    }

  if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
    return memref;

  new = gen_rtx_MEM (mode, addr);
  MEM_COPY_ATTRIBUTES (new, memref);
  return new;
}
/* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
   way we are changing MEMREF, so we only preserve the alias set.  */

rtx
change_address (memref, mode, addr)
     rtx memref;
     enum machine_mode mode;
     rtx addr;
{
  rtx new = change_address_1 (memref, mode, addr, 1);
  enum machine_mode mmode = GET_MODE (new);

  MEM_ATTRS (new)
    = get_mem_attrs (MEM_ALIAS_SET (memref), 0, 0,
		     mmode == BLKmode ? 0 : GEN_INT (GET_MODE_SIZE (mmode)),
		     (mmode == BLKmode ? 1
		      : GET_MODE_ALIGNMENT (mmode) / BITS_PER_UNIT),
		     mmode);

  return new;
}
1861 /* Return a memory reference like MEMREF, but with its mode changed
1862 to MODE and its address offset by OFFSET bytes. If VALIDATE is
1863 nonzero, the memory address is forced to be valid. */
1866 adjust_address_1 (memref, mode, offset, validate)
1867 rtx memref;
1868 enum machine_mode mode;
1869 HOST_WIDE_INT offset;
1870 int validate;
1872 rtx addr = XEXP (memref, 0);
1873 rtx new;
1874 rtx memoffset = MEM_OFFSET (memref);
1875 rtx size = 0;
1876 unsigned int memalign = MEM_ALIGN (memref);
1878 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
1879 object, we can merge it into the LO_SUM. */
1880 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
1881 && offset >= 0
1882 && (unsigned HOST_WIDE_INT) offset
1883 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
1884 addr = gen_rtx_LO_SUM (Pmode, XEXP (addr, 0),
1885 plus_constant (XEXP (addr, 1), offset));
1886 else if (offset == 0)
1887 /* ??? Prefer to create garbage instead of creating shared rtl. */
1888 addr = copy_rtx (addr);
1889 else
1890 addr = plus_constant (addr, offset);
1892 new = change_address_1 (memref, mode, addr, validate);
1894 /* Compute the new values of the memory attributes due to this adjustment.
1895 We add the offsets and update the alignment. */
1896 if (memoffset)
1897 memoffset = GEN_INT (offset + INTVAL (memoffset));
1899 /* Compute the new alignment by taking the MIN of the alignment and the
1900 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
1901 if zero. */
1902 if (offset != 0)
1903 memalign = MIN (memalign, (offset & -offset) * BITS_PER_UNIT);
1905 /* We can compute the size in a number of ways. */
1906 if (mode != BLKmode)
1907 size = GEN_INT (GET_MODE_SIZE (mode));
1908 else if (MEM_SIZE (memref))
1909 size = plus_constant (MEM_SIZE (memref), -offset);
1911 MEM_ATTRS (new) = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_DECL (memref),
1912 memoffset, size, memalign, GET_MODE (new));
1914 /* At some point, we should validate that this offset is within the object,
1915 if all the appropriate values are known. */
1916 return new;
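/* A worked example of the alignment update above: adjusting a MEM
   known to be 64-bit aligned by OFFSET == 12 gives (12 & -12) == 4,
   a 4-byte factor, so the new alignment is MIN (64, 4 * BITS_PER_UNIT)
   == 32 bits. A hypothetical call extracting a word at that offset:

       rtx word = adjust_address_1 (mem, SImode, 12, 1); */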
1919 /* Return a memory reference like MEMREF, but whose address is changed by
1920 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
1921 known to be in OFFSET (possibly 1). */
1924 offset_address (memref, offset, pow2)
1925 rtx memref;
1926 rtx offset;
1927 HOST_WIDE_INT pow2;
1929 rtx new = change_address_1 (memref, VOIDmode,
1930 gen_rtx_PLUS (Pmode, XEXP (memref, 0),
1931 force_reg (Pmode, offset)), 1);
1933 /* Update the alignment to reflect the offset. Reset the offset, which
1934 we don't know. */
1935 MEM_ATTRS (new) = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_DECL (memref),
1936 0, 0, MIN (MEM_ALIGN (memref),
1937 pow2 * BITS_PER_UNIT),
1938 GET_MODE (new));
1939 return new;
1942 /* Return a memory reference like MEMREF, but with its address changed to
1943 ADDR. The caller is asserting that the actual piece of memory pointed
1944 to is the same, just the form of the address is being changed, such as
1945 by putting something into a register. */
1948 replace_equiv_address (memref, addr)
1949 rtx memref;
1950 rtx addr;
1952 /* change_address_1 copies the memory attribute structure without change
1953 and that's exactly what we want here. */
1954 return change_address_1 (memref, VOIDmode, addr, 1);
1957 /* Likewise, but the reference is not required to be valid. */
1960 replace_equiv_address_nv (memref, addr)
1961 rtx memref;
1962 rtx addr;
1964 return change_address_1 (memref, VOIDmode, addr, 0);
1967 /* Return a newly created CODE_LABEL rtx with a unique label number. */
1970 gen_label_rtx ()
1972 rtx label;
1974 label = gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX,
1975 NULL_RTX, label_num++, NULL, NULL);
1977 LABEL_NUSES (label) = 0;
1978 LABEL_ALTERNATE_NAME (label) = NULL;
1979 return label;
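/* Typical use (a sketch; LAB is local): create the label first, emit
   jumps that reference it, then place it. Note that LABEL_NUSES is
   only pushed above zero by mark_jump_label in jump.c, not here.

       rtx lab = gen_label_rtx ();
       emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
                                    gen_rtx_LABEL_REF (VOIDmode, lab)));
       ...
       emit_label (lab); */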
1982 /* For procedure integration. */
1984 /* Install new pointers to the first and last insns in the chain.
1985 Also, set cur_insn_uid to one higher than the last in use.
1986 Used for an inline-procedure after copying the insn chain. */
1988 void
1989 set_new_first_and_last_insn (first, last)
1990 rtx first, last;
1992 rtx insn;
1994 first_insn = first;
1995 last_insn = last;
1996 cur_insn_uid = 0;
1998 for (insn = first; insn; insn = NEXT_INSN (insn))
1999 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2001 cur_insn_uid++;
2004 /* Set the range of label numbers found in the current function.
2005 This is used when belatedly compiling an inline function. */
2007 void
2008 set_new_first_and_last_label_num (first, last)
2009 int first, last;
2011 base_label_num = label_num;
2012 first_label_num = first;
2013 last_label_num = last;
2016 /* Set the last label number found in the current function.
2017 This is used when belatedly compiling an inline function. */
2019 void
2020 set_new_last_label_num (last)
2021 int last;
2023 base_label_num = label_num;
2024 last_label_num = last;
2027 /* Restore all variables describing the current status from the structure *P.
2028 This is used after a nested function. */
2030 void
2031 restore_emit_status (p)
2032 struct function *p ATTRIBUTE_UNUSED;
2034 last_label_num = 0;
2035 clear_emit_caches ();
2038 /* Clear out all parts of the state in F that can safely be discarded
2039 after the function has been compiled, to let garbage collection
2040 reclaim the memory. */
2042 void
2043 free_emit_status (f)
2044 struct function *f;
2046 free (f->emit->x_regno_reg_rtx);
2047 free (f->emit->regno_pointer_align);
2048 free (f->emit->regno_decl);
2049 free (f->emit);
2050 f->emit = NULL;
2053 /* Go through all the RTL insn bodies and copy any invalid shared
2054 structure. This routine should only be called once. */
2056 void
2057 unshare_all_rtl (fndecl, insn)
2058 tree fndecl;
2059 rtx insn;
2061 tree decl;
2063 /* Make sure that virtual parameters are not shared. */
2064 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
2065 SET_DECL_RTL (decl, copy_rtx_if_shared (DECL_RTL (decl)));
2067 /* Make sure that virtual stack slots are not shared. */
2068 unshare_all_decls (DECL_INITIAL (fndecl));
2070 /* Unshare just about everything else. */
2071 unshare_all_rtl_1 (insn);
2073 /* Make sure the addresses of stack slots found outside the insn chain
2074 (such as, in DECL_RTL of a variable) are not shared
2075 with the insn chain.
2077 This special care is necessary when the stack slot MEM does not
2078 actually appear in the insn chain. If it does appear, its address
2079 is unshared from all else at that point. */
2080 stack_slot_list = copy_rtx_if_shared (stack_slot_list);
2083 /* Go through all the RTL insn bodies and copy any invalid shared
2084 structure, again. This is a fairly expensive thing to do so it
2085 should be done sparingly. */
2087 void
2088 unshare_all_rtl_again (insn)
2089 rtx insn;
2091 rtx p;
2092 tree decl;
2094 for (p = insn; p; p = NEXT_INSN (p))
2095 if (INSN_P (p))
2097 reset_used_flags (PATTERN (p));
2098 reset_used_flags (REG_NOTES (p));
2099 reset_used_flags (LOG_LINKS (p));
2102 /* Make sure that virtual stack slots are not shared. */
2103 reset_used_decls (DECL_INITIAL (cfun->decl));
2105 /* Make sure that virtual parameters are not shared. */
2106 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = TREE_CHAIN (decl))
2107 reset_used_flags (DECL_RTL (decl));
2109 reset_used_flags (stack_slot_list);
2111 unshare_all_rtl (cfun->decl, insn);
2114 /* Go through all the RTL insn bodies and copy any invalid shared structure.
2115 Assumes the mark bits are cleared at entry. */
2117 static void
2118 unshare_all_rtl_1 (insn)
2119 rtx insn;
2121 for (; insn; insn = NEXT_INSN (insn))
2122 if (INSN_P (insn))
2124 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2125 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2126 LOG_LINKS (insn) = copy_rtx_if_shared (LOG_LINKS (insn));
2130 /* Go through all virtual stack slots of a function and copy any
2131 shared structure. */
2132 static void
2133 unshare_all_decls (blk)
2134 tree blk;
2136 tree t;
2138 /* Copy shared decls. */
2139 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
2140 if (DECL_RTL_SET_P (t))
2141 SET_DECL_RTL (t, copy_rtx_if_shared (DECL_RTL (t)));
2143 /* Now process sub-blocks. */
2144 for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
2145 unshare_all_decls (t);
2148 /* Go through all virtual stack slots of a function and mark them as
2149 not shared. */
2150 static void
2151 reset_used_decls (blk)
2152 tree blk;
2154 tree t;
2156 /* Mark decls. */
2157 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
2158 if (DECL_RTL_SET_P (t))
2159 reset_used_flags (DECL_RTL (t));
2161 /* Now process sub-blocks. */
2162 for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
2163 reset_used_decls (t);
2166 /* Mark ORIG as in use, and return a copy of it if it was already in use.
2167 Recursively does the same for subexpressions. */
2170 copy_rtx_if_shared (orig)
2171 rtx orig;
2173 rtx x = orig;
2174 int i;
2175 enum rtx_code code;
2176 const char *format_ptr;
2177 int copied = 0;
2179 if (x == 0)
2180 return 0;
2182 code = GET_CODE (x);
2184 /* These types may be freely shared. */
2186 switch (code)
2188 case REG:
2189 case QUEUED:
2190 case CONST_INT:
2191 case CONST_DOUBLE:
2192 case SYMBOL_REF:
2193 case CODE_LABEL:
2194 case PC:
2195 case CC0:
2196 case SCRATCH:
2197 /* SCRATCHes must be shared because they represent distinct values. */
2198 return x;
2200 case CONST:
2201 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
2202 a LABEL_REF, it isn't sharable. */
2203 if (GET_CODE (XEXP (x, 0)) == PLUS
2204 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
2205 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
2206 return x;
2207 break;
2209 case INSN:
2210 case JUMP_INSN:
2211 case CALL_INSN:
2212 case NOTE:
2213 case BARRIER:
2214 /* The chain of insns is not being copied. */
2215 return x;
2217 case MEM:
2218 /* A MEM is allowed to be shared if its address is constant.
2220 We used to allow sharing of MEMs which referenced
2221 virtual_stack_vars_rtx or virtual_incoming_args_rtx, but
2222 that can lose. instantiate_virtual_regs will not unshare
2223 the MEMs, and combine may change the structure of the address
2224 because it looks safe and profitable in one context, but
2225 in some other context it creates unrecognizable RTL. */
2226 if (CONSTANT_ADDRESS_P (XEXP (x, 0)))
2227 return x;
2229 break;
2231 default:
2232 break;
2235 /* This rtx may not be shared. If it has already been seen,
2236 replace it with a copy of itself. */
2238 if (x->used)
2240 rtx copy;
2242 copy = rtx_alloc (code);
2243 memcpy (copy, x,
2244 (sizeof (*copy) - sizeof (copy->fld)
2245 + sizeof (copy->fld[0]) * GET_RTX_LENGTH (code)));
2246 x = copy;
2247 copied = 1;
2249 x->used = 1;
2251 /* Now scan the subexpressions recursively.
2252 We can store any replaced subexpressions directly into X
2253 since we know X is not shared! Any vectors in X
2254 must be copied if X was copied. */
2256 format_ptr = GET_RTX_FORMAT (code);
2258 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2260 switch (*format_ptr++)
2262 case 'e':
2263 XEXP (x, i) = copy_rtx_if_shared (XEXP (x, i));
2264 break;
2266 case 'E':
2267 if (XVEC (x, i) != NULL)
2269 int j;
2270 int len = XVECLEN (x, i);
2272 if (copied && len > 0)
2273 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
2274 for (j = 0; j < len; j++)
2275 XVECEXP (x, i, j) = copy_rtx_if_shared (XVECEXP (x, i, j));
2277 break;
2280 return x;
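/* The `used' bits tested above implement a two-pass protocol, visible
   in unshare_all_rtl_again: clear the marks, then copy. A sketch for
   a single insn (INSN is hypothetical):

       reset_used_flags (PATTERN (insn));
       PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));

   The first walk clears `used'; the second sets it and copies any
   node it reaches a second time. */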
2283 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
2284 to look for shared sub-parts. */
2286 void
2287 reset_used_flags (x)
2288 rtx x;
2290 int i, j;
2291 enum rtx_code code;
2292 const char *format_ptr;
2294 if (x == 0)
2295 return;
2297 code = GET_CODE (x);
2299 /* These types may be freely shared so we needn't do any resetting
2300 for them. */
2302 switch (code)
2304 case REG:
2305 case QUEUED:
2306 case CONST_INT:
2307 case CONST_DOUBLE:
2308 case SYMBOL_REF:
2309 case CODE_LABEL:
2310 case PC:
2311 case CC0:
2312 return;
2314 case INSN:
2315 case JUMP_INSN:
2316 case CALL_INSN:
2317 case NOTE:
2318 case LABEL_REF:
2319 case BARRIER:
2320 /* The chain of insns is not being copied. */
2321 return;
2323 default:
2324 break;
2327 x->used = 0;
2329 format_ptr = GET_RTX_FORMAT (code);
2330 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2332 switch (*format_ptr++)
2334 case 'e':
2335 reset_used_flags (XEXP (x, i));
2336 break;
2338 case 'E':
2339 for (j = 0; j < XVECLEN (x, i); j++)
2340 reset_used_flags (XVECEXP (x, i, j));
2341 break;
2346 /* Copy X if necessary so that it won't be altered by changes in OTHER.
2347 Return X or the rtx for the pseudo reg the value of X was copied into.
2348 OTHER must be valid as a SET_DEST. */
2351 make_safe_from (x, other)
2352 rtx x, other;
2354 while (1)
2355 switch (GET_CODE (other))
2357 case SUBREG:
2358 other = SUBREG_REG (other);
2359 break;
2360 case STRICT_LOW_PART:
2361 case SIGN_EXTEND:
2362 case ZERO_EXTEND:
2363 other = XEXP (other, 0);
2364 break;
2365 default:
2366 goto done;
2368 done:
2369 if ((GET_CODE (other) == MEM
2370 && ! CONSTANT_P (x)
2371 && GET_CODE (x) != REG
2372 && GET_CODE (x) != SUBREG)
2373 || (GET_CODE (other) == REG
2374 && (REGNO (other) < FIRST_PSEUDO_REGISTER
2375 || reg_mentioned_p (other, x))))
2377 rtx temp = gen_reg_rtx (GET_MODE (x));
2378 emit_move_insn (temp, x);
2379 return temp;
2381 return x;
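/* For instance (hypothetical operands): if TARGET is a hard register
   mentioned in X, the call below moves X into a fresh pseudo, so a
   later store into TARGET cannot clobber the value:

       x = make_safe_from (x, target);
       emit_move_insn (target, other_value); */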
2384 /* Emission of insns (adding them to the doubly-linked list). */
2386 /* Return the first insn of the current sequence or current function. */
2389 get_insns ()
2391 return first_insn;
2394 /* Return the last insn emitted in current sequence or current function. */
2397 get_last_insn ()
2399 return last_insn;
2402 /* Specify a new insn as the last in the chain. */
2404 void
2405 set_last_insn (insn)
2406 rtx insn;
2408 if (NEXT_INSN (insn) != 0)
2409 abort ();
2410 last_insn = insn;
2413 /* Return the last insn emitted, even if it is in a sequence now pushed. */
2416 get_last_insn_anywhere ()
2418 struct sequence_stack *stack;
2419 if (last_insn)
2420 return last_insn;
2421 for (stack = seq_stack; stack; stack = stack->next)
2422 if (stack->last != 0)
2423 return stack->last;
2424 return 0;
2427 /* Return a number larger than any instruction's uid in this function. */
2430 get_max_uid ()
2432 return cur_insn_uid;
2435 /* Renumber instructions so that no instruction UIDs are wasted. */
2437 void
2438 renumber_insns (stream)
2439 FILE *stream;
2441 rtx insn;
2443 /* If we're not supposed to renumber instructions, don't. */
2444 if (!flag_renumber_insns)
2445 return;
2447 /* If there aren't that many instructions, then it's not really
2448 worth renumbering them. */
2449 if (flag_renumber_insns == 1 && get_max_uid () < 25000)
2450 return;
2452 cur_insn_uid = 1;
2454 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2456 if (stream)
2457 fprintf (stream, "Renumbering insn %d to %d\n",
2458 INSN_UID (insn), cur_insn_uid);
2459 INSN_UID (insn) = cur_insn_uid++;
2463 /* Return the next insn. If it is a SEQUENCE, return the first insn
2464 of the sequence. */
2467 next_insn (insn)
2468 rtx insn;
2470 if (insn)
2472 insn = NEXT_INSN (insn);
2473 if (insn && GET_CODE (insn) == INSN
2474 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2475 insn = XVECEXP (PATTERN (insn), 0, 0);
2478 return insn;
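/* Thus, once delay slots have been filled, walking the chain with
   next_insn descends into the body of each SEQUENCE, whereas a bare
   NEXT_INSN would step over the SEQUENCE as a single insn. A sketch
   of a complete walk (FIRST and visit are hypothetical):

       rtx insn;
       for (insn = first; insn != 0; insn = next_insn (insn))
         visit (insn); */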
2481 /* Return the previous insn. If it is a SEQUENCE, return the last insn
2482 of the sequence. */
2485 previous_insn (insn)
2486 rtx insn;
2488 if (insn)
2490 insn = PREV_INSN (insn);
2491 if (insn && GET_CODE (insn) == INSN
2492 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2493 insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
2496 return insn;
2499 /* Return the next insn after INSN that is not a NOTE. This routine does not
2500 look inside SEQUENCEs. */
2503 next_nonnote_insn (insn)
2504 rtx insn;
2506 while (insn)
2508 insn = NEXT_INSN (insn);
2509 if (insn == 0 || GET_CODE (insn) != NOTE)
2510 break;
2513 return insn;
2516 /* Return the previous insn before INSN that is not a NOTE. This routine does
2517 not look inside SEQUENCEs. */
2520 prev_nonnote_insn (insn)
2521 rtx insn;
2523 while (insn)
2525 insn = PREV_INSN (insn);
2526 if (insn == 0 || GET_CODE (insn) != NOTE)
2527 break;
2530 return insn;
2533 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
2534 or 0, if there is none. This routine does not look inside
2535 SEQUENCEs. */
2538 next_real_insn (insn)
2539 rtx insn;
2541 while (insn)
2543 insn = NEXT_INSN (insn);
2544 if (insn == 0 || GET_CODE (insn) == INSN
2545 || GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == JUMP_INSN)
2546 break;
2549 return insn;
2552 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
2553 or 0, if there is none. This routine does not look inside
2554 SEQUENCEs. */
2557 prev_real_insn (insn)
2558 rtx insn;
2560 while (insn)
2562 insn = PREV_INSN (insn);
2563 if (insn == 0 || GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
2564 || GET_CODE (insn) == JUMP_INSN)
2565 break;
2568 return insn;
2571 /* Find the next insn after INSN that really does something. This routine
2572 does not look inside SEQUENCEs. Until reload has completed, this is the
2573 same as next_real_insn. */
2576 active_insn_p (insn)
2577 rtx insn;
2579 return (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == JUMP_INSN
2580 || (GET_CODE (insn) == INSN
2581 && (! reload_completed
2582 || (GET_CODE (PATTERN (insn)) != USE
2583 && GET_CODE (PATTERN (insn)) != CLOBBER))));
2587 next_active_insn (insn)
2588 rtx insn;
2590 while (insn)
2592 insn = NEXT_INSN (insn);
2593 if (insn == 0 || active_insn_p (insn))
2594 break;
2597 return insn;
2600 /* Find the last insn before INSN that really does something. This routine
2601 does not look inside SEQUENCEs. Until reload has completed, this is the
2602 same as prev_real_insn. */
2605 prev_active_insn (insn)
2606 rtx insn;
2608 while (insn)
2610 insn = PREV_INSN (insn);
2611 if (insn == 0 || active_insn_p (insn))
2612 break;
2615 return insn;
2618 /* Return the next CODE_LABEL after the insn INSN, or 0 if there is none. */
2621 next_label (insn)
2622 rtx insn;
2624 while (insn)
2626 insn = NEXT_INSN (insn);
2627 if (insn == 0 || GET_CODE (insn) == CODE_LABEL)
2628 break;
2631 return insn;
2634 /* Return the last CODE_LABEL before the insn INSN, or 0 if there is none. */
2637 prev_label (insn)
2638 rtx insn;
2640 while (insn)
2642 insn = PREV_INSN (insn);
2643 if (insn == 0 || GET_CODE (insn) == CODE_LABEL)
2644 break;
2647 return insn;
2650 #ifdef HAVE_cc0
2651 /* INSN uses CC0 and is being moved into a delay slot. Set up REG_CC_SETTER
2652 and REG_CC_USER notes so we can find it. */
2654 void
2655 link_cc0_insns (insn)
2656 rtx insn;
2658 rtx user = next_nonnote_insn (insn);
2660 if (GET_CODE (user) == INSN && GET_CODE (PATTERN (user)) == SEQUENCE)
2661 user = XVECEXP (PATTERN (user), 0, 0);
2663 REG_NOTES (user) = gen_rtx_INSN_LIST (REG_CC_SETTER, insn,
2664 REG_NOTES (user));
2665 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_CC_USER, user, REG_NOTES (insn));
2668 /* Return the next insn that uses CC0 after INSN, which is assumed to
2669 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
2670 applied to the result of this function should yield INSN).
2672 Normally, this is simply the next insn. However, if a REG_CC_USER note
2673 is present, it contains the insn that uses CC0.
2675 Return 0 if we can't find the insn. */
2678 next_cc0_user (insn)
2679 rtx insn;
2681 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
2683 if (note)
2684 return XEXP (note, 0);
2686 insn = next_nonnote_insn (insn);
2687 if (insn && GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
2688 insn = XVECEXP (PATTERN (insn), 0, 0);
2690 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
2691 return insn;
2693 return 0;
2696 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
2697 note, it is the previous insn. */
2700 prev_cc0_setter (insn)
2701 rtx insn;
2703 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
2705 if (note)
2706 return XEXP (note, 0);
2708 insn = prev_nonnote_insn (insn);
2709 if (! sets_cc0_p (PATTERN (insn)))
2710 abort ();
2712 return insn;
2714 #endif
2716 /* Increment the label uses for all labels present in rtx X. */
2718 static void
2719 mark_label_nuses (x)
2720 rtx x;
2722 enum rtx_code code;
2723 int i, j;
2724 const char *fmt;
2726 code = GET_CODE (x);
2727 if (code == LABEL_REF)
2728 LABEL_NUSES (XEXP (x, 0))++;
2730 fmt = GET_RTX_FORMAT (code);
2731 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2733 if (fmt[i] == 'e')
2734 mark_label_nuses (XEXP (x, i));
2735 else if (fmt[i] == 'E')
2736 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2737 mark_label_nuses (XVECEXP (x, i, j));
2742 /* Try splitting insns that can be split for better scheduling.
2743 PAT is the pattern which might split.
2744 TRIAL is the insn providing PAT.
2745 LAST is non-zero if we should return the last insn of the sequence produced.
2747 If this routine succeeds in splitting, it returns the first or last
2748 replacement insn depending on the value of LAST. Otherwise, it
2749 returns TRIAL. If the insn to be returned can be split, it will be. */
2752 try_split (pat, trial, last)
2753 rtx pat, trial;
2754 int last;
2756 rtx before = PREV_INSN (trial);
2757 rtx after = NEXT_INSN (trial);
2758 int has_barrier = 0;
2759 rtx tem;
2760 rtx note, seq;
2761 int probability;
2763 if (any_condjump_p (trial)
2764 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
2765 split_branch_probability = INTVAL (XEXP (note, 0));
2766 probability = split_branch_probability;
2768 seq = split_insns (pat, trial);
2770 split_branch_probability = -1;
2772 /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
2773 We may need to handle this specially. */
2774 if (after && GET_CODE (after) == BARRIER)
2776 has_barrier = 1;
2777 after = NEXT_INSN (after);
2780 if (seq)
2782 /* SEQ can either be a SEQUENCE or the pattern of a single insn.
2783 The latter case will normally arise only when being done so that
2784 it, in turn, will be split (SFmode on the 29k is an example). */
2785 if (GET_CODE (seq) == SEQUENCE)
2787 int i, njumps = 0;
2789 /* Avoid infinite loop if any insn of the result matches
2790 the original pattern. */
2791 for (i = 0; i < XVECLEN (seq, 0); i++)
2792 if (GET_CODE (XVECEXP (seq, 0, i)) == INSN
2793 && rtx_equal_p (PATTERN (XVECEXP (seq, 0, i)), pat))
2794 return trial;
2796 /* Mark labels. */
2797 for (i = XVECLEN (seq, 0) - 1; i >= 0; i--)
2798 if (GET_CODE (XVECEXP (seq, 0, i)) == JUMP_INSN)
2800 rtx insn = XVECEXP (seq, 0, i);
2801 mark_jump_label (PATTERN (insn),
2802 XVECEXP (seq, 0, i), 0);
2803 njumps++;
2804 if (probability != -1
2805 && any_condjump_p (insn)
2806 && !find_reg_note (insn, REG_BR_PROB, 0))
2808 /* We can preserve the REG_BR_PROB notes only if exactly
2809 one jump is created; otherwise the machine description
2810 is responsible for this step, using the
2811 split_branch_probability variable. */
2812 if (njumps != 1)
2813 abort ();
2814 REG_NOTES (insn)
2815 = gen_rtx_EXPR_LIST (REG_BR_PROB,
2816 GEN_INT (probability),
2817 REG_NOTES (insn));
2821 /* If we are splitting a CALL_INSN, look for the CALL_INSN
2822 in SEQ and copy our CALL_INSN_FUNCTION_USAGE to it. */
2823 if (GET_CODE (trial) == CALL_INSN)
2824 for (i = XVECLEN (seq, 0) - 1; i >= 0; i--)
2825 if (GET_CODE (XVECEXP (seq, 0, i)) == CALL_INSN)
2826 CALL_INSN_FUNCTION_USAGE (XVECEXP (seq, 0, i))
2827 = CALL_INSN_FUNCTION_USAGE (trial);
2829 /* Copy notes, particularly those related to the CFG. */
2830 for (note = REG_NOTES (trial); note ; note = XEXP (note, 1))
2832 switch (REG_NOTE_KIND (note))
2834 case REG_EH_REGION:
2835 for (i = XVECLEN (seq, 0) - 1; i >= 0; i--)
2837 rtx insn = XVECEXP (seq, 0, i);
2838 if (GET_CODE (insn) == CALL_INSN
2839 || (flag_non_call_exceptions
2840 && may_trap_p (PATTERN (insn))))
2841 REG_NOTES (insn)
2842 = gen_rtx_EXPR_LIST (REG_EH_REGION,
2843 XEXP (note, 0),
2844 REG_NOTES (insn));
2846 break;
2848 case REG_NORETURN:
2849 case REG_SETJMP:
2850 case REG_ALWAYS_RETURN:
2851 for (i = XVECLEN (seq, 0) - 1; i >= 0; i--)
2853 rtx insn = XVECEXP (seq, 0, i);
2854 if (GET_CODE (insn) == CALL_INSN)
2855 REG_NOTES (insn)
2856 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
2857 XEXP (note, 0),
2858 REG_NOTES (insn));
2860 break;
2862 case REG_NON_LOCAL_GOTO:
2863 for (i = XVECLEN (seq, 0) - 1; i >= 0; i--)
2865 rtx insn = XVECEXP (seq, 0, i);
2866 if (GET_CODE (insn) == JUMP_INSN)
2867 REG_NOTES (insn)
2868 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
2869 XEXP (note, 0),
2870 REG_NOTES (insn));
2872 break;
2874 default:
2875 break;
2879 /* If there are LABELs inside the split insns, increment the
2880 usage counts so we don't delete the labels. */
2881 if (GET_CODE (trial) == INSN)
2882 for (i = XVECLEN (seq, 0) - 1; i >= 0; i--)
2883 if (GET_CODE (XVECEXP (seq, 0, i)) == INSN)
2884 mark_label_nuses (PATTERN (XVECEXP (seq, 0, i)));
2886 tem = emit_insn_after (seq, trial);
2888 delete_related_insns (trial);
2889 if (has_barrier)
2890 emit_barrier_after (tem);
2892 /* Recursively call try_split for each new insn created; by the
2893 time control returns here that insn will be fully split, so
2894 set LAST and continue from the insn after the one returned.
2895 We can't use next_active_insn here since AFTER may be a note.
2896 Ignore deleted insns, which can occur when not optimizing. */
2897 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
2898 if (! INSN_DELETED_P (tem) && INSN_P (tem))
2899 tem = try_split (PATTERN (tem), tem, 1);
2901 /* Avoid infinite loop if the result matches the original pattern. */
2902 else if (rtx_equal_p (seq, pat))
2903 return trial;
2904 else
2906 PATTERN (trial) = seq;
2907 INSN_CODE (trial) = -1;
2908 try_split (seq, trial, last);
2911 /* Return either the first or the last insn, depending on which was
2912 requested. */
2913 return last
2914 ? (after ? PREV_INSN (after) : last_insn)
2915 : NEXT_INSN (before);
2918 return trial;
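/* try_split is typically driven over a whole function the same way
   the recursive call above drives it, e.g. (sketch):

       for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
         if (INSN_P (insn))
           insn = try_split (PATTERN (insn), insn, 1);

   Passing LAST == 1 makes the loop resume after the final replacement
   insn, so the freshly split insns are not scanned twice. */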
2921 /* Make and return an INSN rtx, initializing all its slots.
2922 Store PATTERN in the pattern slots. */
2925 make_insn_raw (pattern)
2926 rtx pattern;
2928 rtx insn;
2930 insn = rtx_alloc (INSN);
2932 INSN_UID (insn) = cur_insn_uid++;
2933 PATTERN (insn) = pattern;
2934 INSN_CODE (insn) = -1;
2935 LOG_LINKS (insn) = NULL;
2936 REG_NOTES (insn) = NULL;
2938 #ifdef ENABLE_RTL_CHECKING
2939 if (insn
2940 && INSN_P (insn)
2941 && (returnjump_p (insn)
2942 || (GET_CODE (pattern) == SET
2943 && SET_DEST (pattern) == pc_rtx)))
2945 warning ("ICE: emit_insn used where emit_jump_insn needed:\n");
2946 debug_rtx (insn);
2948 #endif
2950 return insn;
2953 /* Like `make_insn' but make a JUMP_INSN instead of an insn. */
2955 static rtx
2956 make_jump_insn_raw (pattern)
2957 rtx pattern;
2959 rtx insn;
2961 insn = rtx_alloc (JUMP_INSN);
2962 INSN_UID (insn) = cur_insn_uid++;
2964 PATTERN (insn) = pattern;
2965 INSN_CODE (insn) = -1;
2966 LOG_LINKS (insn) = NULL;
2967 REG_NOTES (insn) = NULL;
2968 JUMP_LABEL (insn) = NULL;
2970 return insn;
2973 /* Like `make_insn' but make a CALL_INSN instead of an insn. */
2975 static rtx
2976 make_call_insn_raw (pattern)
2977 rtx pattern;
2979 rtx insn;
2981 insn = rtx_alloc (CALL_INSN);
2982 INSN_UID (insn) = cur_insn_uid++;
2984 PATTERN (insn) = pattern;
2985 INSN_CODE (insn) = -1;
2986 LOG_LINKS (insn) = NULL;
2987 REG_NOTES (insn) = NULL;
2988 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
2990 return insn;
2993 /* Add INSN to the end of the doubly-linked list.
2994 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
2996 void
2997 add_insn (insn)
2998 rtx insn;
3000 PREV_INSN (insn) = last_insn;
3001 NEXT_INSN (insn) = 0;
3003 if (NULL != last_insn)
3004 NEXT_INSN (last_insn) = insn;
3006 if (NULL == first_insn)
3007 first_insn = insn;
3009 last_insn = insn;
3012 /* Add INSN into the doubly-linked list after insn AFTER. This and
3013 the next should be the only functions called to insert an insn once
3014 delay slots have been filled since only they know how to update a
3015 SEQUENCE. */
3017 void
3018 add_insn_after (insn, after)
3019 rtx insn, after;
3021 rtx next = NEXT_INSN (after);
3022 basic_block bb;
3024 if (optimize && INSN_DELETED_P (after))
3025 abort ();
3027 NEXT_INSN (insn) = next;
3028 PREV_INSN (insn) = after;
3030 if (next)
3032 PREV_INSN (next) = insn;
3033 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
3034 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
3036 else if (last_insn == after)
3037 last_insn = insn;
3038 else
3040 struct sequence_stack *stack = seq_stack;
3041 /* Scan all pending sequences too. */
3042 for (; stack; stack = stack->next)
3043 if (after == stack->last)
3045 stack->last = insn;
3046 break;
3049 if (stack == 0)
3050 abort ();
3053 if (basic_block_for_insn
3054 && (unsigned int)INSN_UID (after) < basic_block_for_insn->num_elements
3055 && (bb = BLOCK_FOR_INSN (after)))
3057 set_block_for_insn (insn, bb);
3058 /* This should not happen, since the first insn in the BB is
3059 always either a NOTE or a LABEL. */
3060 if (bb->end == after
3061 /* Avoid clobbering of structure when creating new BB. */
3062 && GET_CODE (insn) != BARRIER
3063 && (GET_CODE (insn) != NOTE
3064 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
3065 bb->end = insn;
3068 NEXT_INSN (after) = insn;
3069 if (GET_CODE (after) == INSN && GET_CODE (PATTERN (after)) == SEQUENCE)
3071 rtx sequence = PATTERN (after);
3072 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3076 /* Add INSN into the doubly-linked list before insn BEFORE. This and
3077 the previous should be the only functions called to insert an insn once
3078 delay slots have been filled since only they know how to update a
3079 SEQUENCE. */
3081 void
3082 add_insn_before (insn, before)
3083 rtx insn, before;
3085 rtx prev = PREV_INSN (before);
3086 basic_block bb;
3088 if (optimize && INSN_DELETED_P (before))
3089 abort ();
3091 PREV_INSN (insn) = prev;
3092 NEXT_INSN (insn) = before;
3094 if (prev)
3096 NEXT_INSN (prev) = insn;
3097 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
3099 rtx sequence = PATTERN (prev);
3100 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3103 else if (first_insn == before)
3104 first_insn = insn;
3105 else
3107 struct sequence_stack *stack = seq_stack;
3108 /* Scan all pending sequences too. */
3109 for (; stack; stack = stack->next)
3110 if (before == stack->first)
3112 stack->first = insn;
3113 break;
3116 if (stack == 0)
3117 abort ();
3120 if (basic_block_for_insn
3121 && (unsigned int)INSN_UID (before) < basic_block_for_insn->num_elements
3122 && (bb = BLOCK_FOR_INSN (before)))
3124 set_block_for_insn (insn, bb);
3125 /* This should not happen, since the first insn in the BB is
3126 always either a NOTE or a LABEL. */
3127 if (bb->head == insn
3128 /* Avoid clobbering of structure when creating new BB. */
3129 && GET_CODE (insn) != BARRIER
3130 && (GET_CODE (insn) != NOTE
3131 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
3132 abort ();
3135 PREV_INSN (before) = insn;
3136 if (GET_CODE (before) == INSN && GET_CODE (PATTERN (before)) == SEQUENCE)
3137 PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
3140 /* Remove an insn from its doubly-linked list. This function knows how
3141 to handle sequences. */
3142 void
3143 remove_insn (insn)
3144 rtx insn;
3146 rtx next = NEXT_INSN (insn);
3147 rtx prev = PREV_INSN (insn);
3148 basic_block bb;
3150 if (prev)
3152 NEXT_INSN (prev) = next;
3153 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
3155 rtx sequence = PATTERN (prev);
3156 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
3159 else if (first_insn == insn)
3160 first_insn = next;
3161 else
3163 struct sequence_stack *stack = seq_stack;
3164 /* Scan all pending sequences too. */
3165 for (; stack; stack = stack->next)
3166 if (insn == stack->first)
3168 stack->first = next;
3169 break;
3172 if (stack == 0)
3173 abort ();
3176 if (next)
3178 PREV_INSN (next) = prev;
3179 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
3180 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
3182 else if (last_insn == insn)
3183 last_insn = prev;
3184 else
3186 struct sequence_stack *stack = seq_stack;
3187 /* Scan all pending sequences too. */
3188 for (; stack; stack = stack->next)
3189 if (insn == stack->last)
3191 stack->last = prev;
3192 break;
3195 if (stack == 0)
3196 abort ();
3198 if (basic_block_for_insn
3199 && (unsigned int)INSN_UID (insn) < basic_block_for_insn->num_elements
3200 && (bb = BLOCK_FOR_INSN (insn)))
3202 if (bb->head == insn)
3204 /* Never delete the basic-block note without deleting the whole
3205 basic block. */
3206 if (GET_CODE (insn) == NOTE)
3207 abort ();
3208 bb->head = next;
3210 if (bb->end == insn)
3211 bb->end = prev;
3215 /* Delete all insns made since FROM.
3216 FROM becomes the new last instruction. */
3218 void
3219 delete_insns_since (from)
3220 rtx from;
3222 if (from == 0)
3223 first_insn = 0;
3224 else
3225 NEXT_INSN (from) = 0;
3226 last_insn = from;
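/* The usual pattern (a sketch; FAILED is a hypothetical condition) is
   to checkpoint before a tentative expansion and roll back on failure:

       rtx last = get_last_insn ();
       ... emit a tentative expansion ...
       if (failed)
         delete_insns_since (last); */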
3229 /* This function is deprecated, please use sequences instead.
3231 Move a consecutive bunch of insns to a different place in the chain.
3232 The insns to be moved are those between FROM and TO.
3233 They are moved to a new position after the insn AFTER.
3234 AFTER must not be FROM or TO or any insn in between.
3236 This function does not know about SEQUENCEs and hence should not be
3237 called after delay-slot filling has been done. */
3239 void
3240 reorder_insns_nobb (from, to, after)
3241 rtx from, to, after;
3243 /* Splice this bunch out of where it is now. */
3244 if (PREV_INSN (from))
3245 NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
3246 if (NEXT_INSN (to))
3247 PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
3248 if (last_insn == to)
3249 last_insn = PREV_INSN (from);
3250 if (first_insn == from)
3251 first_insn = NEXT_INSN (to);
3253 /* Make the new neighbors point to it and it to them. */
3254 if (NEXT_INSN (after))
3255 PREV_INSN (NEXT_INSN (after)) = to;
3257 NEXT_INSN (to) = NEXT_INSN (after);
3258 PREV_INSN (from) = after;
3259 NEXT_INSN (after) = from;
3260 if (after == last_insn)
3261 last_insn = to;
3264 /* Same as function above, but take care to update BB boundaries. */
3265 void
3266 reorder_insns (from, to, after)
3267 rtx from, to, after;
3269 rtx prev = PREV_INSN (from);
3270 basic_block bb, bb2;
3272 reorder_insns_nobb (from, to, after);
3274 if (basic_block_for_insn
3275 && (unsigned int)INSN_UID (after) < basic_block_for_insn->num_elements
3276 && (bb = BLOCK_FOR_INSN (after)))
3278 rtx x;
3280 if (basic_block_for_insn
3281 && (unsigned int)INSN_UID (from) < basic_block_for_insn->num_elements
3282 && (bb2 = BLOCK_FOR_INSN (from)))
3284 if (bb2->end == to)
3285 bb2->end = prev;
3288 if (bb->end == after)
3289 bb->end = to;
3291 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
3292 set_block_for_insn (x, bb);
3296 /* Return the line note insn preceding INSN. */
3298 static rtx
3299 find_line_note (insn)
3300 rtx insn;
3302 if (no_line_numbers)
3303 return 0;
3305 for (; insn; insn = PREV_INSN (insn))
3306 if (GET_CODE (insn) == NOTE
3307 && NOTE_LINE_NUMBER (insn) >= 0)
3308 break;
3310 return insn;
3313 /* Like reorder_insns, but inserts line notes to preserve the line numbers
3314 of the moved insns when debugging. This may insert a note between AFTER
3315 and FROM, and another one after TO. */
3317 void
3318 reorder_insns_with_line_notes (from, to, after)
3319 rtx from, to, after;
3321 rtx from_line = find_line_note (from);
3322 rtx after_line = find_line_note (after);
3324 reorder_insns (from, to, after);
3326 if (from_line == after_line)
3327 return;
3329 if (from_line)
3330 emit_line_note_after (NOTE_SOURCE_FILE (from_line),
3331 NOTE_LINE_NUMBER (from_line),
3332 after);
3333 if (after_line)
3334 emit_line_note_after (NOTE_SOURCE_FILE (after_line),
3335 NOTE_LINE_NUMBER (after_line),
3336 to);
3339 /* Remove unnecessary notes from the instruction stream. */
3341 void
3342 remove_unnecessary_notes ()
3344 rtx block_stack = NULL_RTX;
3345 rtx eh_stack = NULL_RTX;
3346 rtx insn;
3347 rtx next;
3348 rtx tmp;
3350 /* We must not remove the first instruction in the function because
3351 the compiler depends on the first instruction being a note. */
3352 for (insn = NEXT_INSN (get_insns ()); insn; insn = next)
3354 /* Remember what's next. */
3355 next = NEXT_INSN (insn);
3357 /* We're only interested in notes. */
3358 if (GET_CODE (insn) != NOTE)
3359 continue;
3361 switch (NOTE_LINE_NUMBER (insn))
3363 case NOTE_INSN_DELETED:
3364 remove_insn (insn);
3365 break;
3367 case NOTE_INSN_EH_REGION_BEG:
3368 eh_stack = alloc_INSN_LIST (insn, eh_stack);
3369 break;
3371 case NOTE_INSN_EH_REGION_END:
3372 /* Too many end notes. */
3373 if (eh_stack == NULL_RTX)
3374 abort ();
3375 /* Mismatched nesting. */
3376 if (NOTE_EH_HANDLER (XEXP (eh_stack, 0)) != NOTE_EH_HANDLER (insn))
3377 abort ();
3378 tmp = eh_stack;
3379 eh_stack = XEXP (eh_stack, 1);
3380 free_INSN_LIST_node (tmp);
3381 break;
3383 case NOTE_INSN_BLOCK_BEG:
3384 /* By now, all notes indicating lexical blocks should have
3385 NOTE_BLOCK filled in. */
3386 if (NOTE_BLOCK (insn) == NULL_TREE)
3387 abort ();
3388 block_stack = alloc_INSN_LIST (insn, block_stack);
3389 break;
3391 case NOTE_INSN_BLOCK_END:
3392 /* Too many end notes. */
3393 if (block_stack == NULL_RTX)
3394 abort ();
3395 /* Mismatched nesting. */
3396 if (NOTE_BLOCK (XEXP (block_stack, 0)) != NOTE_BLOCK (insn))
3397 abort ();
3398 tmp = block_stack;
3399 block_stack = XEXP (block_stack, 1);
3400 free_INSN_LIST_node (tmp);
3402 /* Scan back to see if there are any non-note instructions
3403 between INSN and the beginning of this block. If not,
3404 then there is no PC range in the generated code that will
3405 actually be in this block, so there's no point in
3406 remembering the existence of the block. */
3407 for (tmp = PREV_INSN (insn); tmp ; tmp = PREV_INSN (tmp))
3409 /* This block contains a real instruction. Note that we
3410 don't include labels; if the only thing in the block
3411 is a label, then there are still no PC values that
3412 lie within the block. */
3413 if (INSN_P (tmp))
3414 break;
3416 /* We're only interested in NOTEs. */
3417 if (GET_CODE (tmp) != NOTE)
3418 continue;
3420 if (NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BLOCK_BEG)
3422 /* We just verified that this BLOCK matches us with
3423 the block_stack check above. Never delete the
3424 BLOCK for the outermost scope of the function; we
3425 can refer to names from that scope even if the
3426 block notes are messed up. */
3427 if (! is_body_block (NOTE_BLOCK (insn))
3428 && (*debug_hooks->ignore_block) (NOTE_BLOCK (insn)))
3430 remove_insn (tmp);
3431 remove_insn (insn);
3433 break;
3435 else if (NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BLOCK_END)
3436 /* There's a nested block. We need to leave the
3437 current block in place since otherwise the debugger
3438 wouldn't be able to show symbols from our block in
3439 the nested block. */
3440 break;
3445 /* Too many begin notes. */
3446 if (block_stack || eh_stack)
3447 abort ();
3451 /* Emit an insn of given code and pattern
3452 at a specified place within the doubly-linked list. */
3454 /* Make an instruction with body PATTERN
3455 and output it before the instruction BEFORE. */
3458 emit_insn_before (pattern, before)
3459 rtx pattern, before;
3461 rtx insn = before;
3463 if (GET_CODE (pattern) == SEQUENCE)
3465 int i;
3467 for (i = 0; i < XVECLEN (pattern, 0); i++)
3469 insn = XVECEXP (pattern, 0, i);
3470 add_insn_before (insn, before);
3473 else
3475 insn = make_insn_raw (pattern);
3476 add_insn_before (insn, before);
3479 return insn;
3482 /* Make an instruction with body PATTERN and code JUMP_INSN
3483 and output it before the instruction BEFORE. */
3486 emit_jump_insn_before (pattern, before)
3487 rtx pattern, before;
3489 rtx insn;
3491 if (GET_CODE (pattern) == SEQUENCE)
3492 insn = emit_insn_before (pattern, before);
3493 else
3495 insn = make_jump_insn_raw (pattern);
3496 add_insn_before (insn, before);
3499 return insn;
3502 /* Make an instruction with body PATTERN and code CALL_INSN
3503 and output it before the instruction BEFORE. */
3506 emit_call_insn_before (pattern, before)
3507 rtx pattern, before;
3509 rtx insn;
3511 if (GET_CODE (pattern) == SEQUENCE)
3512 insn = emit_insn_before (pattern, before);
3513 else
3515 insn = make_call_insn_raw (pattern);
3516 add_insn_before (insn, before);
3517 PUT_CODE (insn, CALL_INSN);
3520 return insn;
3523 /* Make an insn of code BARRIER
3524 and output it before the insn BEFORE. */
3527 emit_barrier_before (before)
3528 rtx before;
3530 rtx insn = rtx_alloc (BARRIER);
3532 INSN_UID (insn) = cur_insn_uid++;
3534 add_insn_before (insn, before);
3535 return insn;
3538 /* Emit the label LABEL before the insn BEFORE. */
3541 emit_label_before (label, before)
3542 rtx label, before;
3544 /* This can be called twice for the same label as a result of the
3545 confusion that follows a syntax error! So make it harmless. */
3546 if (INSN_UID (label) == 0)
3548 INSN_UID (label) = cur_insn_uid++;
3549 add_insn_before (label, before);
3552 return label;
3555 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
3558 emit_note_before (subtype, before)
3559 int subtype;
3560 rtx before;
3562 rtx note = rtx_alloc (NOTE);
3563 INSN_UID (note) = cur_insn_uid++;
3564 NOTE_SOURCE_FILE (note) = 0;
3565 NOTE_LINE_NUMBER (note) = subtype;
3567 add_insn_before (note, before);
3568 return note;
3571 /* Make an insn of code INSN with body PATTERN
3572 and output it after the insn AFTER. */
3575 emit_insn_after (pattern, after)
3576 rtx pattern, after;
3578 rtx insn = after;
3580 if (GET_CODE (pattern) == SEQUENCE)
3582 int i;
3584 for (i = 0; i < XVECLEN (pattern, 0); i++)
3586 insn = XVECEXP (pattern, 0, i);
3587 add_insn_after (insn, after);
3588 after = insn;
3591 else
3593 insn = make_insn_raw (pattern);
3594 add_insn_after (insn, after);
3597 return insn;
3600 /* Similar to emit_insn_after, except that line notes are to be inserted so
3601 as to act as if this insn were at FROM. */
3603 void
3604 emit_insn_after_with_line_notes (pattern, after, from)
3605 rtx pattern, after, from;
3607 rtx from_line = find_line_note (from);
3608 rtx after_line = find_line_note (after);
3609 rtx insn = emit_insn_after (pattern, after);
3611 if (from_line)
3612 emit_line_note_after (NOTE_SOURCE_FILE (from_line),
3613 NOTE_LINE_NUMBER (from_line),
3614 after);
3616 if (after_line)
3617 emit_line_note_after (NOTE_SOURCE_FILE (after_line),
3618 NOTE_LINE_NUMBER (after_line),
3619 insn);
3622 /* Make an insn of code JUMP_INSN with body PATTERN
3623 and output it after the insn AFTER. */
3626 emit_jump_insn_after (pattern, after)
3627 rtx pattern, after;
3629 rtx insn;
3631 if (GET_CODE (pattern) == SEQUENCE)
3632 insn = emit_insn_after (pattern, after);
3633 else
3635 insn = make_jump_insn_raw (pattern);
3636 add_insn_after (insn, after);
3639 return insn;
3642 /* Make an insn of code BARRIER
3643 and output it after the insn AFTER. */
3646 emit_barrier_after (after)
3647 rtx after;
3649 rtx insn = rtx_alloc (BARRIER);
3651 INSN_UID (insn) = cur_insn_uid++;
3653 add_insn_after (insn, after);
3654 return insn;
3657 /* Emit the label LABEL after the insn AFTER. */
3660 emit_label_after (label, after)
3661 rtx label, after;
3663 /* This can be called twice for the same label
3664 as a result of the confusion that follows a syntax error!
3665 So make it harmless. */
3666 if (INSN_UID (label) == 0)
3668 INSN_UID (label) = cur_insn_uid++;
3669 add_insn_after (label, after);
3672 return label;
3675 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
3678 emit_note_after (subtype, after)
3679 int subtype;
3680 rtx after;
3682 rtx note = rtx_alloc (NOTE);
3683 INSN_UID (note) = cur_insn_uid++;
3684 NOTE_SOURCE_FILE (note) = 0;
3685 NOTE_LINE_NUMBER (note) = subtype;
3686 add_insn_after (note, after);
3687 return note;
3690 /* Emit a line note for FILE and LINE after the insn AFTER. */
3693 emit_line_note_after (file, line, after)
3694 const char *file;
3695 int line;
3696 rtx after;
3698 rtx note;
3700 if (no_line_numbers && line > 0)
3702 cur_insn_uid++;
3703 return 0;
3706 note = rtx_alloc (NOTE);
3707 INSN_UID (note) = cur_insn_uid++;
3708 NOTE_SOURCE_FILE (note) = file;
3709 NOTE_LINE_NUMBER (note) = line;
3710 add_insn_after (note, after);
3711 return note;
3714 /* Make an insn of code INSN with pattern PATTERN
3715 and add it to the end of the doubly-linked list.
3716 If PATTERN is a SEQUENCE, take the elements of it
3717 and emit an insn for each element.
3719 Returns the last insn emitted. */
3722 emit_insn (pattern)
3723 rtx pattern;
3725 rtx insn = last_insn;
3727 if (GET_CODE (pattern) == SEQUENCE)
3729 int i;
3731 for (i = 0; i < XVECLEN (pattern, 0); i++)
3733 insn = XVECEXP (pattern, 0, i);
3734 add_insn (insn);
3737 else
3739 insn = make_insn_raw (pattern);
3740 add_insn (insn);
3743 return insn;
3746 /* Emit the insns in a chain starting with INSN.
3747 Return the last insn emitted. */
3750 emit_insns (insn)
3751 rtx insn;
3753 rtx last = 0;
3755 while (insn)
3757 rtx next = NEXT_INSN (insn);
3758 add_insn (insn);
3759 last = insn;
3760 insn = next;
3763 return last;
3766 /* Emit the insns in a chain starting with INSN and place them in front of
3767 the insn BEFORE. Return the last insn emitted. */
3770 emit_insns_before (insn, before)
3771 rtx insn;
3772 rtx before;
3774 rtx last = 0;
3776 while (insn)
3778 rtx next = NEXT_INSN (insn);
3779 add_insn_before (insn, before);
3780 last = insn;
3781 insn = next;
3784 return last;
3787 /* Emit the insns in a chain starting with FIRST and place them in back of
3788 the insn AFTER. Return the last insn emitted. */
3791 emit_insns_after (first, after)
3792 rtx first;
3793 rtx after;
3795 rtx last;
3796 rtx after_after;
3797 basic_block bb;
3799 if (!after)
3800 abort ();
3802 if (!first)
3803 return after;
3805 if (basic_block_for_insn
3806 && (unsigned int)INSN_UID (after) < basic_block_for_insn->num_elements
3807 && (bb = BLOCK_FOR_INSN (after)))
3809 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
3810 set_block_for_insn (last, bb);
3811 set_block_for_insn (last, bb);
3812 if (bb->end == after)
3813 bb->end = last;
3815 else
3816 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
3817 continue;
3819 after_after = NEXT_INSN (after);
3821 NEXT_INSN (after) = first;
3822 PREV_INSN (first) = after;
3823 NEXT_INSN (last) = after_after;
3824 if (after_after)
3825 PREV_INSN (after_after) = last;
3827 if (after == last_insn)
3828 last_insn = last;
3829 return last;
3832 /* Make an insn of code JUMP_INSN with pattern PATTERN
3833 and add it to the end of the doubly-linked list. */
3836 emit_jump_insn (pattern)
3837 rtx pattern;
3839 if (GET_CODE (pattern) == SEQUENCE)
3840 return emit_insn (pattern);
3841 else
3843 rtx insn = make_jump_insn_raw (pattern);
3844 add_insn (insn);
3845 return insn;
3849 /* Make an insn of code CALL_INSN with pattern PATTERN
3850 and add it to the end of the doubly-linked list. */
3853 emit_call_insn (pattern)
3854 rtx pattern;
3856 if (GET_CODE (pattern) == SEQUENCE)
3857 return emit_insn (pattern);
3858 else
3860 rtx insn = make_call_insn_raw (pattern);
3861 add_insn (insn);
3862 PUT_CODE (insn, CALL_INSN);
3863 return insn;
3867 /* Add the label LABEL to the end of the doubly-linked list. */
3870 emit_label (label)
3871 rtx label;
3873 /* This can be called twice for the same label
3874 as a result of the confusion that follows a syntax error!
3875 So make it harmless. */
3876 if (INSN_UID (label) == 0)
3878 INSN_UID (label) = cur_insn_uid++;
3879 add_insn (label);
3881 return label;
3884 /* Make an insn of code BARRIER
3885 and add it to the end of the doubly-linked list. */
3888 emit_barrier ()
3890 rtx barrier = rtx_alloc (BARRIER);
3891 INSN_UID (barrier) = cur_insn_uid++;
3892 add_insn (barrier);
3893 return barrier;
3896 /* Make an insn of code NOTE
3897 with data-fields specified by FILE and LINE
3898 and add it to the end of the doubly-linked list,
3899 but only if line-numbers are desired for debugging info. */
3902 emit_line_note (file, line)
3903 const char *file;
3904 int line;
3906 set_file_and_line_for_stmt (file, line);
3908 #if 0
3909 if (no_line_numbers)
3910 return 0;
3911 #endif
3913 return emit_note (file, line);
3916 /* Make an insn of code NOTE
3917 with data-fields specified by FILE and LINE
3918 and add it to the end of the doubly-linked list.
3919 If it is a line-number NOTE, omit it if it matches the previous one. */
3922 emit_note (file, line)
3923 const char *file;
3924 int line;
3926 rtx note;
3928 if (line > 0)
3930 if (file && last_filename && !strcmp (file, last_filename)
3931 && line == last_linenum)
3932 return 0;
3933 last_filename = file;
3934 last_linenum = line;
3937 if (no_line_numbers && line > 0)
3939 cur_insn_uid++;
3940 return 0;
3943 note = rtx_alloc (NOTE);
3944 INSN_UID (note) = cur_insn_uid++;
3945 NOTE_SOURCE_FILE (note) = file;
3946 NOTE_LINE_NUMBER (note) = line;
3947 add_insn (note);
3948 return note;
3951 /* Emit a NOTE, and don't omit it even if LINE is the previous note. */
3954 emit_line_note_force (file, line)
3955 const char *file;
3956 int line;
3958 last_linenum = -1;
3959 return emit_line_note (file, line);
3962 /* Cause next statement to emit a line note even if the line number
3963 has not changed. This is used at the beginning of a function. */
3965 void
3966 force_next_line_note ()
3968 last_linenum = -1;
3971 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
3972 note of this type already exists, remove it first. */
3975 set_unique_reg_note (insn, kind, datum)
3976 rtx insn;
3977 enum reg_note kind;
3978 rtx datum;
3980 rtx note = find_reg_note (insn, kind, NULL_RTX);
3982 /* Don't add ASM_OPERANDS REG_EQUAL/REG_EQUIV notes.
3983 Such a note serves no useful purpose and breaks eliminate_regs. */
3984 if ((kind == REG_EQUAL || kind == REG_EQUIV)
3985 && GET_CODE (datum) == ASM_OPERANDS)
3986 return NULL_RTX;
3988 if (note)
3990 XEXP (note, 0) = datum;
3991 return note;
3994 REG_NOTES (insn) = gen_rtx_EXPR_LIST (kind, datum, REG_NOTES (insn));
3995 return REG_NOTES (insn);
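/* For example (a sketch; TARGET, SOURCE and VALUE are hypothetical):
   after emitting a multi-insn constant load, record the net effect so
   that cse and friends can see it:

       rtx insn = emit_move_insn (target, source);
       set_unique_reg_note (insn, REG_EQUAL, GEN_INT (value)); */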
3998 /* Return an indication of which type of insn should have X as a body.
3999 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
4001 enum rtx_code
4002 classify_insn (x)
4003 rtx x;
4005 if (GET_CODE (x) == CODE_LABEL)
4006 return CODE_LABEL;
4007 if (GET_CODE (x) == CALL)
4008 return CALL_INSN;
4009 if (GET_CODE (x) == RETURN)
4010 return JUMP_INSN;
4011 if (GET_CODE (x) == SET)
4013 if (SET_DEST (x) == pc_rtx)
4014 return JUMP_INSN;
4015 else if (GET_CODE (SET_SRC (x)) == CALL)
4016 return CALL_INSN;
4017 else
4018 return INSN;
4020 if (GET_CODE (x) == PARALLEL)
4022 int j;
4023 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
4024 if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
4025 return CALL_INSN;
4026 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
4027 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
4028 return JUMP_INSN;
4029 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
4030 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
4031 return CALL_INSN;
4033 return INSN;
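/* For instance (hypothetical operands REG and LAB):

       classify_insn (gen_rtx_SET (VOIDmode, reg, const0_rtx))
   returns INSN, while
       classify_insn (gen_rtx_SET (VOIDmode, pc_rtx,
                                   gen_rtx_LABEL_REF (VOIDmode, lab)))
   returns JUMP_INSN, because the SET destination is the pc. */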
4036 /* Emit the rtl pattern X as an appropriate kind of insn.
4037 If X is a label, it is simply added into the insn chain. */
4040 emit (x)
4041 rtx x;
4043 enum rtx_code code = classify_insn (x);
4045 if (code == CODE_LABEL)
4046 return emit_label (x);
4047 else if (code == INSN)
4048 return emit_insn (x);
4049 else if (code == JUMP_INSN)
4051 rtx insn = emit_jump_insn (x);
4052 if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
4053 return emit_barrier ();
4054 return insn;
4056 else if (code == CALL_INSN)
4057 return emit_call_insn (x);
4058 else
4059 abort ();
4062 /* Begin emitting insns to a sequence which can be packaged in an
4063 RTL_EXPR. If this sequence will contain something that might cause
4064 the compiler to pop arguments to function calls (because those
4065 pops have previously been deferred; see INHIBIT_DEFER_POP for more
4066 details), use do_pending_stack_adjust before calling this function.
4067 That will ensure that the deferred pops are not accidentally
4068 emitted in the middle of this sequence. */
4070 void
4071 start_sequence ()
4073 struct sequence_stack *tem;
4075 tem = (struct sequence_stack *) xmalloc (sizeof (struct sequence_stack));
4077 tem->next = seq_stack;
4078 tem->first = first_insn;
4079 tem->last = last_insn;
4080 tem->sequence_rtl_expr = seq_rtl_expr;
4082 seq_stack = tem;
4084 first_insn = 0;
4085 last_insn = 0;
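/* The canonical pairing with gen_sequence and end_sequence (a sketch;
   TMP and X are hypothetical):

       start_sequence ();
       emit_move_insn (tmp, x);
       seq = gen_sequence ();
       end_sequence ();
       emit_insn (seq);

   Everything emitted between the two calls collects in the pending
   sequence rather than in the main insn chain. */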
4088 /* Similarly, but indicate that this sequence will be placed in T, an
4089 RTL_EXPR. See the documentation for start_sequence for more
4090 information about how to use this function. */
4092 void
4093 start_sequence_for_rtl_expr (t)
4094 tree t;
4096 start_sequence ();
4098 seq_rtl_expr = t;
4101 /* Set up the insn chain starting with FIRST as the current sequence,
4102 saving the previously current one. See the documentation for
4103 start_sequence for more information about how to use this function. */
4105 void
4106 push_to_sequence (first)
4107 rtx first;
4109 rtx last;
4111 start_sequence ();
4113 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last));
4115 first_insn = first;
4116 last_insn = last;
4119 /* Set up the insn chain from a chain starting in FIRST and ending in LAST. */
4121 void
4122 push_to_full_sequence (first, last)
4123 rtx first, last;
4125 start_sequence ();
4126 first_insn = first;
4127 last_insn = last;
4128 /* We really should have the end of the insn chain here. */
4129 if (last && NEXT_INSN (last))
4130 abort ();
4133 /* Set up the outer-level insn chain
4134 as the current sequence, saving the previously current one. */
4136 void
4137 push_topmost_sequence ()
4139 struct sequence_stack *stack, *top = NULL;
4141 start_sequence ();
4143 for (stack = seq_stack; stack; stack = stack->next)
4144 top = stack;
4146 first_insn = top->first;
4147 last_insn = top->last;
4148 seq_rtl_expr = top->sequence_rtl_expr;
4151 /* After emitting to the outer-level insn chain, update the outer-level
4152 insn chain, and restore the previous saved state. */
4154 void
4155 pop_topmost_sequence ()
4157 struct sequence_stack *stack, *top = NULL;
4159 for (stack = seq_stack; stack; stack = stack->next)
4160 top = stack;
4162 top->first = first_insn;
4163 top->last = last_insn;
4164 /* ??? Why don't we save seq_rtl_expr here? */
4166 end_sequence ();
4169 /* After emitting to a sequence, restore previous saved state.
4171 To get the contents of the sequence just made, you must call
4172 `gen_sequence' *before* calling here.
4174 If the compiler might have deferred popping arguments while
4175 generating this sequence, and this sequence will not be immediately
4176 inserted into the instruction stream, use do_pending_stack_adjust
4177 before calling gen_sequence. That will ensure that the deferred
4178 pops are inserted into this sequence, and not into some random
4179 location in the instruction stream. See INHIBIT_DEFER_POP for more
4180 information about deferred popping of arguments. */
4182 void
4183 end_sequence ()
4185 struct sequence_stack *tem = seq_stack;
4187 first_insn = tem->first;
4188 last_insn = tem->last;
4189 seq_rtl_expr = tem->sequence_rtl_expr;
4190 seq_stack = tem->next;
4192 free (tem);
4195 /* This works like end_sequence, but records the old sequence in FIRST
4196 and LAST. */
4198 void
4199 end_full_sequence (first, last)
4200 rtx *first, *last;
4202 *first = first_insn;
4203 *last = last_insn;
4204 end_sequence ();
4207 /* Return 1 if currently emitting into a sequence. */
4210 in_sequence_p ()
4212 return seq_stack != 0;
4215 /* Generate a SEQUENCE rtx containing the insns already emitted
4216 to the current sequence.
4218 This is how the gen_... function from a DEFINE_EXPAND
4219 constructs the SEQUENCE that it returns. */
4222 gen_sequence ()
4224 rtx result;
4225 rtx tem;
4226 int i;
4227 int len;
4229 /* Count the insns in the chain. */
4230 len = 0;
4231 for (tem = first_insn; tem; tem = NEXT_INSN (tem))
4232 len++;
4234 /* If only one insn, return it rather than a SEQUENCE.
4235 (Now that we cache SEQUENCE expressions, it isn't worth special-casing
4236 the case of an empty list.)
4237 We only return the pattern of an insn if its code is INSN and it
4238 has no notes. This ensures that no information gets lost. */
4239 if (len == 1
4240 && ! RTX_FRAME_RELATED_P (first_insn)
4241 && GET_CODE (first_insn) == INSN
4242 /* Don't throw away any reg notes. */
4243 && REG_NOTES (first_insn) == 0)
4244 return PATTERN (first_insn);
4246 result = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (len));
4248 for (i = 0, tem = first_insn; tem; tem = NEXT_INSN (tem), i++)
4249 XVECEXP (result, 0, i) = tem;
4251 return result;
4254 /* Put the various virtual registers into REGNO_REG_RTX. */
4256 void
4257 init_virtual_regs (es)
4258 struct emit_status *es;
4260 rtx *ptr = es->x_regno_reg_rtx;
4261 ptr[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
4262 ptr[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
4263 ptr[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
4264 ptr[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
4265 ptr[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
4268 void
4269 clear_emit_caches ()
4271 int i;
4273 /* Clear the start_sequence/gen_sequence cache. */
4274 for (i = 0; i < SEQUENCE_RESULT_SIZE; i++)
4275 sequence_result[i] = 0;
4276 free_insn = 0;
4279 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
4280 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
4281 static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
4282 static int copy_insn_n_scratches;
4284 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
4285 copied an ASM_OPERANDS.
4286 In that case, it is the original input-operand vector. */
4287 static rtvec orig_asm_operands_vector;
4289 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
4290 copied an ASM_OPERANDS.
4291 In that case, it is the copied input-operand vector. */
4292 static rtvec copy_asm_operands_vector;
4294 /* Likewise for the constraints vector. */
4295 static rtvec orig_asm_constraints_vector;
4296 static rtvec copy_asm_constraints_vector;
/* Recursively create a new copy of an rtx for copy_insn.
   This function differs from copy_rtx in that it handles SCRATCHes and
   ASM_OPERANDs properly.
   Normally, this function is not used directly; use copy_insn as front end.
   However, you could first copy an insn pattern with copy_insn and then use
   this function afterwards to properly copy any REG_NOTEs containing
   SCRATCHes.  */

rtx
copy_insn_1 (orig)
     rtx orig;
{
  rtx copy;
  int i, j;
  RTX_CODE code;
  const char *format_ptr;

  code = GET_CODE (orig);

  switch (code)
    {
    case REG:
    case QUEUED:
    case CONST_INT:
    case CONST_DOUBLE:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case ADDRESSOF:
      return orig;

    case SCRATCH:
      for (i = 0; i < copy_insn_n_scratches; i++)
	if (copy_insn_scratch_in[i] == orig)
	  return copy_insn_scratch_out[i];
      break;

    case CONST:
      /* CONST can be shared if it contains a SYMBOL_REF.  If it contains
	 a LABEL_REF, it isn't sharable.  */
      if (GET_CODE (XEXP (orig, 0)) == PLUS
	  && GET_CODE (XEXP (XEXP (orig, 0), 0)) == SYMBOL_REF
	  && GET_CODE (XEXP (XEXP (orig, 0), 1)) == CONST_INT)
	return orig;
      break;

      /* A MEM with a constant address is not sharable.  The problem is that
	 the constant address may need to be reloaded.  If the mem is shared,
	 then reloading one copy of this mem will cause all copies to appear
	 to have been reloaded.  */

    default:
      break;
    }

  copy = rtx_alloc (code);

  /* Copy the various flags, and other information.  We assume that
     all fields need copying, and then clear the fields that should
     not be copied.  That is the sensible default behavior, and forces
     us to explicitly document why we are *not* copying a flag.  */
  memcpy (copy, orig, sizeof (struct rtx_def) - sizeof (rtunion));

  /* We do not copy the USED flag, which is used as a mark bit during
     walks over the RTL.  */
  copy->used = 0;

  /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs.  */
  if (GET_RTX_CLASS (code) == 'i')
    {
      copy->jump = 0;
      copy->call = 0;
      copy->frame_related = 0;
    }

  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));

  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
    {
      copy->fld[i] = orig->fld[i];
      switch (*format_ptr++)
	{
	case 'e':
	  if (XEXP (orig, i) != NULL)
	    XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
	  break;

	case 'E':
	case 'V':
	  if (XVEC (orig, i) == orig_asm_constraints_vector)
	    XVEC (copy, i) = copy_asm_constraints_vector;
	  else if (XVEC (orig, i) == orig_asm_operands_vector)
	    XVEC (copy, i) = copy_asm_operands_vector;
	  else if (XVEC (orig, i) != NULL)
	    {
	      XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
	      for (j = 0; j < XVECLEN (copy, i); j++)
		XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
	    }
	  break;

	case 't':
	case 'w':
	case 'i':
	case 's':
	case 'S':
	case 'u':
	case '0':
	  /* These are left unchanged.  */
	  break;

	default:
	  abort ();
	}
    }

  if (code == SCRATCH)
    {
      i = copy_insn_n_scratches++;
      if (i >= MAX_RECOG_OPERANDS)
	abort ();
      copy_insn_scratch_in[i] = orig;
      copy_insn_scratch_out[i] = copy;
    }
  else if (code == ASM_OPERANDS)
    {
      orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
      copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
      orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
      copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
    }

  return copy;
}
/* Create a new copy of an rtx.
   This function differs from copy_rtx in that it handles SCRATCHes and
   ASM_OPERANDs properly.
   INSN doesn't really have to be a full INSN; it could be just the
   pattern.  */

rtx
copy_insn (insn)
     rtx insn;
{
  copy_insn_n_scratches = 0;
  orig_asm_operands_vector = 0;
  orig_asm_constraints_vector = 0;
  copy_asm_operands_vector = 0;
  copy_asm_constraints_vector = 0;
  return copy_insn_1 (insn);
}
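/* Usage sketch (illustrative only; `insn' and `new_pat' are
   hypothetical).  Unlike copy_rtx, copy_insn keeps each SCRATCH and
   each ASM_OPERANDS input/constraint vector unified across the copy:

	rtx new_pat = copy_insn (PATTERN (insn));

   As the comment above copy_insn_1 notes, REG_NOTEs that mention the
   same SCRATCHes can then be copied with copy_insn_1 while the state
   recorded by copy_insn is still in effect.  */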
/* Initialize data structures and variables in this file
   before generating rtl for each function.  */

void
init_emit ()
{
  struct function *f = cfun;

  f->emit = (struct emit_status *) xmalloc (sizeof (struct emit_status));
  first_insn = NULL;
  last_insn = NULL;
  seq_rtl_expr = NULL;
  cur_insn_uid = 1;
  reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
  last_linenum = 0;
  last_filename = 0;
  first_label_num = label_num;
  last_label_num = 0;
  seq_stack = NULL;

  clear_emit_caches ();

  /* Init the tables that describe all the pseudo regs.  */

  f->emit->regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;

  f->emit->regno_pointer_align
    = (unsigned char *) xcalloc (f->emit->regno_pointer_align_length,
				 sizeof (unsigned char));

  regno_reg_rtx
    = (rtx *) xcalloc (f->emit->regno_pointer_align_length, sizeof (rtx));

  f->emit->regno_decl
    = (tree *) xcalloc (f->emit->regno_pointer_align_length, sizeof (tree));

  /* Put copies of all the virtual register rtx into regno_reg_rtx.  */
  init_virtual_regs (f->emit);

  /* Indicate that the virtual registers and stack locations are
     all pointers.  */
  REG_POINTER (stack_pointer_rtx) = 1;
  REG_POINTER (frame_pointer_rtx) = 1;
  REG_POINTER (hard_frame_pointer_rtx) = 1;
  REG_POINTER (arg_pointer_rtx) = 1;

  REG_POINTER (virtual_incoming_args_rtx) = 1;
  REG_POINTER (virtual_stack_vars_rtx) = 1;
  REG_POINTER (virtual_stack_dynamic_rtx) = 1;
  REG_POINTER (virtual_outgoing_args_rtx) = 1;
  REG_POINTER (virtual_cfa_rtx) = 1;

#ifdef STACK_BOUNDARY
  REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;

  REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
#endif

#ifdef INIT_EXPANDERS
  INIT_EXPANDERS;
#endif
}
/* Mark SS for GC.  */

static void
mark_sequence_stack (ss)
     struct sequence_stack *ss;
{
  while (ss)
    {
      ggc_mark_rtx (ss->first);
      ggc_mark_tree (ss->sequence_rtl_expr);
      ss = ss->next;
    }
}
/* Mark ES for GC.  */

void
mark_emit_status (es)
     struct emit_status *es;
{
  rtx *r;
  tree *t;
  int i;

  if (es == 0)
    return;

  for (i = es->regno_pointer_align_length, r = es->x_regno_reg_rtx,
       t = es->regno_decl;
       i > 0; --i, ++r, ++t)
    {
      ggc_mark_rtx (*r);
      ggc_mark_tree (*t);
    }

  mark_sequence_stack (es->sequence_stack);
  ggc_mark_tree (es->sequence_rtl_expr);
  ggc_mark_rtx (es->x_first_insn);
}
/* Create some permanent unique rtl objects shared between all functions.
   LINE_NUMBERS is nonzero if line numbers are to be generated.  */

void
init_emit_once (line_numbers)
     int line_numbers;
{
  int i;
  enum machine_mode mode;
  enum machine_mode double_mode;

  /* Initialize the CONST_INT and memory attribute hash tables.  */
  const_int_htab = htab_create (37, const_int_htab_hash,
				const_int_htab_eq, NULL);
  ggc_add_deletable_htab (const_int_htab, 0, 0);

  mem_attrs_htab = htab_create (37, mem_attrs_htab_hash,
				mem_attrs_htab_eq, NULL);
  ggc_add_deletable_htab (mem_attrs_htab, 0, mem_attrs_mark);

  no_line_numbers = ! line_numbers;

  /* Compute the word and byte modes.  */

  byte_mode = VOIDmode;
  word_mode = VOIDmode;
  double_mode = VOIDmode;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
	  && byte_mode == VOIDmode)
	byte_mode = mode;

      if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
	  && word_mode == VOIDmode)
	word_mode = mode;
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
	  && double_mode == VOIDmode)
	double_mode = mode;
    }

  ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);

  /* Assign register numbers to the globally defined register rtx.
     This must be done at runtime because the register number field
     is in a union and some compilers can't initialize unions.  */

  pc_rtx = gen_rtx (PC, VOIDmode);
  cc0_rtx = gen_rtx (CC0, VOIDmode);
  stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
  frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
  if (hard_frame_pointer_rtx == 0)
    hard_frame_pointer_rtx = gen_raw_REG (Pmode,
					  HARD_FRAME_POINTER_REGNUM);
  if (arg_pointer_rtx == 0)
    arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
  virtual_incoming_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
  virtual_stack_vars_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
  virtual_stack_dynamic_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
  virtual_outgoing_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
  virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);

  /* These rtx must be roots if GC is enabled.  */
  ggc_add_rtx_root (global_rtl, GR_MAX);

#ifdef INIT_EXPANDERS
  /* This is to initialize {init|mark|free}_machine_status before the first
     call to push_function_context_to.  This is needed by the Chill front
     end which calls push_function_context_to before the first call to
     init_function_start.  */
  INIT_EXPANDERS;
#endif

  /* Create the unique rtx's for certain rtx codes and operand values.  */

  /* Don't use gen_rtx here since gen_rtx in this case
     tries to use these variables.  */
  for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
    const_int_rtx[i + MAX_SAVED_CONST_INT] =
      gen_rtx_raw_CONST_INT (VOIDmode, i);
  ggc_add_rtx_root (const_int_rtx, 2 * MAX_SAVED_CONST_INT + 1);

  if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
      && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
    const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
  else
    const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);

  dconst0 = REAL_VALUE_ATOF ("0", double_mode);
  dconst1 = REAL_VALUE_ATOF ("1", double_mode);
  dconst2 = REAL_VALUE_ATOF ("2", double_mode);
  dconstm1 = REAL_VALUE_ATOF ("-1", double_mode);

  for (i = 0; i <= 2; i++)
    {
      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	{
	  rtx tem = rtx_alloc (CONST_DOUBLE);
	  union real_extract u;

	  /* Zero any holes in a structure.  */
	  memset ((char *) &u, 0, sizeof u);
	  u.d = i == 0 ? dconst0 : i == 1 ? dconst1 : dconst2;

	  /* Avoid trailing garbage in the rtx.  */
	  if (sizeof (u) < sizeof (HOST_WIDE_INT))
	    CONST_DOUBLE_LOW (tem) = 0;
	  if (sizeof (u) < 2 * sizeof (HOST_WIDE_INT))
	    CONST_DOUBLE_HIGH (tem) = 0;

	  memcpy (&CONST_DOUBLE_LOW (tem), &u, sizeof u);
	  CONST_DOUBLE_CHAIN (tem) = NULL_RTX;
	  PUT_MODE (tem, mode);

	  const_tiny_rtx[i][(int) mode] = tem;
	}

      const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] = GEN_INT (i);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] = GEN_INT (i);
    }

  for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
    if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
      const_tiny_rtx[0][i] = const0_rtx;

  const_tiny_rtx[0][(int) BImode] = const0_rtx;
  if (STORE_FLAG_VALUE == 1)
    const_tiny_rtx[1][(int) BImode] = const1_rtx;

  /* For bounded pointers, `&const_tiny_rtx[0][0]' is not the same as
     `(rtx *) const_tiny_rtx'.  The former has bounds that only cover
     `const_tiny_rtx[0]', whereas the latter has bounds that cover all.  */
  ggc_add_rtx_root ((rtx *) const_tiny_rtx,
		    sizeof const_tiny_rtx / sizeof (rtx));
  ggc_add_rtx_root (&const_true_rtx, 1);

#ifdef RETURN_ADDRESS_POINTER_REGNUM
  return_address_pointer_rtx
    = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
#endif

#ifdef STRUCT_VALUE
  struct_value_rtx = STRUCT_VALUE;
#else
  struct_value_rtx = gen_rtx_REG (Pmode, STRUCT_VALUE_REGNUM);
#endif

#ifdef STRUCT_VALUE_INCOMING
  struct_value_incoming_rtx = STRUCT_VALUE_INCOMING;
#else
#ifdef STRUCT_VALUE_INCOMING_REGNUM
  struct_value_incoming_rtx
    = gen_rtx_REG (Pmode, STRUCT_VALUE_INCOMING_REGNUM);
#else
  struct_value_incoming_rtx = struct_value_rtx;
#endif
#endif

#ifdef STATIC_CHAIN_REGNUM
  static_chain_rtx = gen_rtx_REG (Pmode, STATIC_CHAIN_REGNUM);

#ifdef STATIC_CHAIN_INCOMING_REGNUM
  if (STATIC_CHAIN_INCOMING_REGNUM != STATIC_CHAIN_REGNUM)
    static_chain_incoming_rtx
      = gen_rtx_REG (Pmode, STATIC_CHAIN_INCOMING_REGNUM);
  else
#endif
    static_chain_incoming_rtx = static_chain_rtx;
#endif

#ifdef STATIC_CHAIN
  static_chain_rtx = STATIC_CHAIN;

#ifdef STATIC_CHAIN_INCOMING
  static_chain_incoming_rtx = STATIC_CHAIN_INCOMING;
#else
  static_chain_incoming_rtx = static_chain_rtx;
#endif
#endif

  if (PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
    pic_offset_table_rtx = gen_rtx_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);

  ggc_add_rtx_root (&pic_offset_table_rtx, 1);
  ggc_add_rtx_root (&struct_value_rtx, 1);
  ggc_add_rtx_root (&struct_value_incoming_rtx, 1);
  ggc_add_rtx_root (&static_chain_rtx, 1);
  ggc_add_rtx_root (&static_chain_incoming_rtx, 1);
  ggc_add_rtx_root (&return_address_pointer_rtx, 1);
}
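/* Usage note (illustrative; `mode' stands for any machine mode).  The
   const_tiny_rtx table filled in above is normally reached through the
   CONST0_RTX, CONST1_RTX and CONST2_RTX macros from rtl.h, e.g.

	rtx zero = CONST0_RTX (mode);

   which simply indexes const_tiny_rtx by value and mode.  */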
/* Query and clear/restore no_line_numbers.  This is used by the
   switch / case handling in stmt.c to give proper line numbers in
   warnings about unreachable code.  */

int
force_line_numbers ()
{
  int old = no_line_numbers;

  no_line_numbers = 0;
  if (old)
    force_next_line_note ();
  return old;
}

void
restore_line_number_status (old_value)
     int old_value;
{
  no_line_numbers = old_value;
}
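/* Usage sketch (illustrative only; `saved' is a hypothetical local).
   A caller such as the switch / case code in stmt.c brackets the
   region where it needs reliable line notes:

	int saved = force_line_numbers ();
	... emit code that may warn about unreachable cases ...
	restore_line_number_status (saved);
   */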