gcc/emit-rtl.c
1 /* Emit RTL for the GNU C-Compiler expander.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
23 /* Middle-to-low level generation of rtx code and insns.
25 This file contains the functions `gen_rtx', `gen_reg_rtx'
26 and `gen_label_rtx' that are the usual ways of creating rtl
27 expressions for most purposes.
29 It also has the functions for creating insns and linking
30 them in the doubly-linked chain.
32 The patterns of the insns are created by machine-dependent
33 routines in insn-emit.c, which is generated automatically from
34 the machine description. These routines use `gen_rtx' to make
35 the individual rtx's of the pattern; what is machine dependent
36 is the kind of rtx's they make and what arguments they use. */
38 #include "config.h"
39 #include "system.h"
40 #include "toplev.h"
41 #include "rtl.h"
42 #include "tree.h"
43 #include "tm_p.h"
44 #include "flags.h"
45 #include "function.h"
46 #include "expr.h"
47 #include "regs.h"
48 #include "hard-reg-set.h"
49 #include "hashtab.h"
50 #include "insn-config.h"
51 #include "recog.h"
52 #include "real.h"
53 #include "obstack.h"
54 #include "bitmap.h"
55 #include "basic-block.h"
56 #include "ggc.h"
58 /* Commonly used modes. */
60 enum machine_mode byte_mode; /* Mode whose width is BITS_PER_UNIT. */
61 enum machine_mode word_mode; /* Mode whose width is BITS_PER_WORD. */
62 enum machine_mode double_mode; /* Mode whose width is DOUBLE_TYPE_SIZE. */
63 enum machine_mode ptr_mode; /* Mode whose width is POINTER_SIZE. */
66 /* This is *not* reset after each function. It gives each CODE_LABEL
67 in the entire compilation a unique label number. */
69 static int label_num = 1;
71 /* Highest label number in current function.
72 Zero means use the value of label_num instead.
73 This is nonzero only when belatedly compiling an inline function. */
75 static int last_label_num;
77 /* Value label_num had when set_new_first_and_last_label_number was called.
78 If label_num has not changed since then, last_label_num is valid. */
80 static int base_label_num;
82 /* Nonzero means do not generate NOTEs for source line numbers. */
84 static int no_line_numbers;
86 /* Commonly used rtx's, so that we only need space for one copy.
87 These are initialized once for the entire compilation.
88 All of these except perhaps the floating-point CONST_DOUBLEs
89 are unique; no other rtx-object will be equal to any of these. */
91 rtx global_rtl[GR_MAX];
93 /* We record floating-point CONST_DOUBLEs in each floating-point mode for
94 the values of 0, 1, and 2. For the integer entries and VOIDmode, we
95 record a copy of const[012]_rtx. */
97 rtx const_tiny_rtx[3][(int) MAX_MACHINE_MODE];
99 rtx const_true_rtx;
101 REAL_VALUE_TYPE dconst0;
102 REAL_VALUE_TYPE dconst1;
103 REAL_VALUE_TYPE dconst2;
104 REAL_VALUE_TYPE dconstm1;
106 /* All references to the following fixed hard registers go through
107 these unique rtl objects. On machines where the frame-pointer and
108 arg-pointer are the same register, they use the same unique object.
110 After register allocation, other rtl objects which used to be pseudo-regs
111 may be clobbered to refer to the frame-pointer register.
112 But references that were originally to the frame-pointer can be
113 distinguished from the others because they contain frame_pointer_rtx.
115 When to use frame_pointer_rtx and hard_frame_pointer_rtx is a little
116 tricky: until register elimination has taken place hard_frame_pointer_rtx
117 should be used if it is being set, and frame_pointer_rtx otherwise. After
118 register elimination hard_frame_pointer_rtx should always be used.
119 On machines where the two registers are the same (most machines), these
120 two rtx objects are the same.
122 In an inline procedure, the stack and frame pointer rtxs may not be
123 used for anything else. */
124 rtx struct_value_rtx; /* (REG:Pmode STRUCT_VALUE_REGNUM) */
125 rtx struct_value_incoming_rtx; /* (REG:Pmode STRUCT_VALUE_INCOMING_REGNUM) */
126 rtx static_chain_rtx; /* (REG:Pmode STATIC_CHAIN_REGNUM) */
127 rtx static_chain_incoming_rtx; /* (REG:Pmode STATIC_CHAIN_INCOMING_REGNUM) */
128 rtx pic_offset_table_rtx; /* (REG:Pmode PIC_OFFSET_TABLE_REGNUM) */
130 /* This is used to implement __builtin_return_address for some machines.
131 See for instance the MIPS port. */
132 rtx return_address_pointer_rtx; /* (REG:Pmode RETURN_ADDRESS_POINTER_REGNUM) */
134 /* We make one copy of (const_int C) where C is in
135 [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
136 to save space during the compilation and simplify comparisons of
137 integers. */
139 rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
141 /* A hash table storing CONST_INTs whose absolute value is greater
142 than MAX_SAVED_CONST_INT. */
144 static htab_t const_int_htab;
146 /* start_sequence and gen_sequence can make a lot of rtx expressions which are
147 shortly thrown away. We use two mechanisms to prevent this waste:
149 For sizes up to 5 elements, we keep a SEQUENCE and its associated
150 rtvec for use by gen_sequence. One entry for each size is
151 sufficient because most cases are calls to gen_sequence followed by
152 immediately emitting the SEQUENCE. Reuse is safe since emitting a
153 sequence is destructive on the insn in it anyway and hence can't be
154 redone.
156 We do not bother to save this cached data over nested function calls.
157 Instead, we just reinitialize them. */
159 #define SEQUENCE_RESULT_SIZE 5
161 static rtx sequence_result[SEQUENCE_RESULT_SIZE];
163 /* During RTL generation, we also keep a list of free INSN rtl codes. */
164 static rtx free_insn;
166 #define first_insn (cfun->emit->x_first_insn)
167 #define last_insn (cfun->emit->x_last_insn)
168 #define cur_insn_uid (cfun->emit->x_cur_insn_uid)
169 #define last_linenum (cfun->emit->x_last_linenum)
170 #define last_filename (cfun->emit->x_last_filename)
171 #define first_label_num (cfun->emit->x_first_label_num)
173 static rtx make_jump_insn_raw PARAMS ((rtx));
174 static rtx make_call_insn_raw PARAMS ((rtx));
175 static rtx find_line_note PARAMS ((rtx));
176 static void mark_sequence_stack PARAMS ((struct sequence_stack *));
177 static void unshare_all_rtl_1 PARAMS ((rtx));
178 static void unshare_all_decls PARAMS ((tree));
179 static void reset_used_decls PARAMS ((tree));
180 static hashval_t const_int_htab_hash PARAMS ((const void *));
181 static int const_int_htab_eq PARAMS ((const void *,
182 const void *));
183 static int rtx_htab_mark_1 PARAMS ((void **, void *));
184 static void rtx_htab_mark PARAMS ((void *));
187 /* Returns a hash code for X (which is really a CONST_INT). */
189 static hashval_t
190 const_int_htab_hash (x)
191 const void *x;
193 return (hashval_t) INTVAL ((const struct rtx_def *) x);
196 /* Returns non-zero if the value represented by X (which is really a
197 CONST_INT) is the same as that given by Y (which is really a
198 HOST_WIDE_INT *). */
200 static int
201 const_int_htab_eq (x, y)
202 const void *x;
203 const void *y;
205 return (INTVAL ((const struct rtx_def *) x) == *((const HOST_WIDE_INT *) y));
208 /* Mark the hash-table element X (which is really a pointer to an
209 rtx). */
211 static int
212 rtx_htab_mark_1 (x, data)
213 void **x;
214 void *data ATTRIBUTE_UNUSED;
216 ggc_mark_rtx (*x);
217 return 1;
220 /* Mark all the elements of HTAB (which is really an htab_t full of
221 rtxs). */
223 static void
224 rtx_htab_mark (htab)
225 void *htab;
227 htab_traverse (*((htab_t *) htab), rtx_htab_mark_1, NULL);
230 /* Generate a new REG rtx. Make sure ORIGINAL_REGNO is set properly, and
231 don't attempt to share with the various global pieces of rtl (such as
232 frame_pointer_rtx). */
235 gen_raw_REG (mode, regno)
236 enum machine_mode mode;
237 int regno;
239 rtx x = gen_rtx_raw_REG (mode, regno);
240 ORIGINAL_REGNO (x) = regno;
241 return x;
244 /* There are some RTL codes that require special attention; the generation
245 functions do the raw handling. If you add to this list, modify
246 special_rtx in gengenrtl.c as well. */
249 gen_rtx_CONST_INT (mode, arg)
250 enum machine_mode mode ATTRIBUTE_UNUSED;
251 HOST_WIDE_INT arg;
253 void **slot;
255 if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
256 return const_int_rtx[arg + MAX_SAVED_CONST_INT];
258 #if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
259 if (const_true_rtx && arg == STORE_FLAG_VALUE)
260 return const_true_rtx;
261 #endif
263 /* Look up the CONST_INT in the hash table. */
264 slot = htab_find_slot_with_hash (const_int_htab, &arg,
265 (hashval_t) arg, INSERT);
266 if (*slot == 0)
267 *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);
269 return (rtx) *slot;
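/* Illustrative sketch, added commentary rather than original source:
   because small values come from the preallocated const_int_rtx array
   and larger values are entered in const_int_htab, repeated requests
   for the same constant yield pointer-equal rtx objects.

	rtx a = GEN_INT (2);
	rtx b = GEN_INT (2);
	rtx c = gen_rtx_CONST_INT (VOIDmode, (HOST_WIDE_INT) 123456);
	rtx d = gen_rtx_CONST_INT (VOIDmode, (HOST_WIDE_INT) 123456);

   Here a == b (both are const2_rtx, since 2 lies within the saved
   range) and c == d (both come from the same hash-table slot).  */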
272 /* CONST_DOUBLEs needs special handling because their length is known
273 only at run-time. */
276 gen_rtx_CONST_DOUBLE (mode, arg0, arg1, arg2)
277 enum machine_mode mode;
278 rtx arg0;
279 HOST_WIDE_INT arg1, arg2;
281 rtx r = rtx_alloc (CONST_DOUBLE);
282 int i;
284 PUT_MODE (r, mode);
285 XEXP (r, 0) = arg0;
286 X0EXP (r, 1) = NULL_RTX;
287 XWINT (r, 2) = arg1;
288 XWINT (r, 3) = arg2;
290 for (i = GET_RTX_LENGTH (CONST_DOUBLE) - 1; i > 3; --i)
291 XWINT (r, i) = 0;
293 return r;
297 gen_rtx_REG (mode, regno)
298 enum machine_mode mode;
299 int regno;
301 /* In case the MD file explicitly references the frame pointer, have
302 all such references point to the same frame pointer. This is
303 used during frame pointer elimination to distinguish the explicit
304 references to these registers from pseudos that happened to be
305 assigned to them.
307 If we have eliminated the frame pointer or arg pointer, we will
308 be using it as a normal register, for example as a spill
309 register. In such cases, we might be accessing it in a mode that
310 is not Pmode and therefore cannot use the pre-allocated rtx.
312 Also don't do this when we are making new REGs in reload, since
313 we don't want to get confused with the real pointers. */
315 if (mode == Pmode && !reload_in_progress)
317 if (regno == FRAME_POINTER_REGNUM)
318 return frame_pointer_rtx;
319 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
320 if (regno == HARD_FRAME_POINTER_REGNUM)
321 return hard_frame_pointer_rtx;
322 #endif
323 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && HARD_FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
324 if (regno == ARG_POINTER_REGNUM)
325 return arg_pointer_rtx;
326 #endif
327 #ifdef RETURN_ADDRESS_POINTER_REGNUM
328 if (regno == RETURN_ADDRESS_POINTER_REGNUM)
329 return return_address_pointer_rtx;
330 #endif
331 if (regno == STACK_POINTER_REGNUM)
332 return stack_pointer_rtx;
335 return gen_raw_REG (mode, regno);
339 gen_rtx_MEM (mode, addr)
340 enum machine_mode mode;
341 rtx addr;
343 rtx rt = gen_rtx_raw_MEM (mode, addr);
345 /* This field is not cleared by the mere allocation of the rtx, so
346 we clear it here. */
347 MEM_ALIAS_SET (rt) = 0;
349 return rt;
352 /* rtx gen_rtx (code, mode, [element1, ..., elementn])
354 ** This routine generates an RTX of the size specified by
355 ** <code>, which is an RTX code. The RTX structure is initialized
356 ** from the arguments <element1> through <elementn>, which are
357 ** interpreted according to the specific RTX type's format. The
358 ** special machine mode associated with the rtx (if any) is specified
359 ** in <mode>.
361 ** gen_rtx can be invoked in a way which resembles the lisp-like
362 ** rtx it will generate. For example, the following rtx structure:
364 ** (plus:QI (mem:QI (reg:SI 1))
365 ** (mem:QI (plus:SI (reg:SI 2) (reg:SI 3))))
367 ** ...would be generated by the following C code:
369 ** gen_rtx (PLUS, QImode,
370 ** gen_rtx (MEM, QImode,
371 ** gen_rtx (REG, SImode, 1)),
372 ** gen_rtx (MEM, QImode,
373 ** gen_rtx (PLUS, SImode,
374 ** gen_rtx (REG, SImode, 2),
375 ** gen_rtx (REG, SImode, 3)))),
378 /*VARARGS2*/
380 gen_rtx VPARAMS ((enum rtx_code code, enum machine_mode mode, ...))
382 #ifndef ANSI_PROTOTYPES
383 enum rtx_code code;
384 enum machine_mode mode;
385 #endif
386 va_list p;
387 register int i; /* Array indices... */
388 register const char *fmt; /* Current rtx's format... */
389 register rtx rt_val; /* RTX to return to caller... */
391 VA_START (p, mode);
393 #ifndef ANSI_PROTOTYPES
394 code = va_arg (p, enum rtx_code);
395 mode = va_arg (p, enum machine_mode);
396 #endif
398 switch (code)
400 case CONST_INT:
401 rt_val = gen_rtx_CONST_INT (mode, va_arg (p, HOST_WIDE_INT));
402 break;
404 case CONST_DOUBLE:
406 rtx arg0 = va_arg (p, rtx);
407 HOST_WIDE_INT arg1 = va_arg (p, HOST_WIDE_INT);
408 HOST_WIDE_INT arg2 = va_arg (p, HOST_WIDE_INT);
409 rt_val = gen_rtx_CONST_DOUBLE (mode, arg0, arg1, arg2);
411 break;
413 case REG:
414 rt_val = gen_rtx_REG (mode, va_arg (p, int));
415 break;
417 case MEM:
418 rt_val = gen_rtx_MEM (mode, va_arg (p, rtx));
419 break;
421 default:
422 rt_val = rtx_alloc (code); /* Allocate the storage space. */
423 rt_val->mode = mode; /* Store the machine mode... */
425 fmt = GET_RTX_FORMAT (code); /* Find the right format... */
426 for (i = 0; i < GET_RTX_LENGTH (code); i++)
428 switch (*fmt++)
430 case '0': /* Unused field. */
431 break;
433 case 'i': /* An integer? */
434 XINT (rt_val, i) = va_arg (p, int);
435 break;
437 case 'w': /* A wide integer? */
438 XWINT (rt_val, i) = va_arg (p, HOST_WIDE_INT);
439 break;
441 case 's': /* A string? */
442 XSTR (rt_val, i) = va_arg (p, char *);
443 break;
445 case 'e': /* An expression? */
446 case 'u': /* An insn? Same except when printing. */
447 XEXP (rt_val, i) = va_arg (p, rtx);
448 break;
450 case 'E': /* An RTX vector? */
451 XVEC (rt_val, i) = va_arg (p, rtvec);
452 break;
454 case 'b': /* A bitmap? */
455 XBITMAP (rt_val, i) = va_arg (p, bitmap);
456 break;
458 case 't': /* A tree? */
459 XTREE (rt_val, i) = va_arg (p, tree);
460 break;
462 default:
463 abort ();
466 break;
469 va_end (p);
470 return rt_val;
473 /* gen_rtvec (n, [rt1, ..., rtn])
475 ** This routine creates an rtvec and stores within it the
476 ** pointers to rtx's which are its arguments.
479 /*VARARGS1*/
480 rtvec
481 gen_rtvec VPARAMS ((int n, ...))
483 #ifndef ANSI_PROTOTYPES
484 int n;
485 #endif
486 int i;
487 va_list p;
488 rtx *vector;
490 VA_START (p, n);
492 #ifndef ANSI_PROTOTYPES
493 n = va_arg (p, int);
494 #endif
496 if (n == 0)
497 return NULL_RTVEC; /* Don't allocate an empty rtvec... */
499 vector = (rtx *) alloca (n * sizeof (rtx));
501 for (i = 0; i < n; i++)
502 vector[i] = va_arg (p, rtx);
503 va_end (p);
505 return gen_rtvec_v (n, vector);
508 rtvec
509 gen_rtvec_v (n, argp)
510 int n;
511 rtx *argp;
513 register int i;
514 register rtvec rt_val;
516 if (n == 0)
517 return NULL_RTVEC; /* Don't allocate an empty rtvec... */
519 rt_val = rtvec_alloc (n); /* Allocate an rtvec... */
521 for (i = 0; i < n; i++)
522 rt_val->elem[i] = *argp++;
524 return rt_val;
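/* Illustrative sketch, added commentary rather than original source:
   the two entry points build the same vector, one from varargs and one
   from an array plus a count.

	rtx elems[2];
	rtvec v1, v2;

	elems[0] = gen_rtx_REG (SImode, 2);
	elems[1] = gen_rtx_REG (SImode, 3);
	v1 = gen_rtvec (2, elems[0], elems[1]);
	v2 = gen_rtvec_v (2, elems);

   Both calls return a fresh two-element rtvec holding the given REGs;
   a count of zero returns NULL_RTVEC instead of an empty vector.  */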
528 /* Generate a REG rtx for a new pseudo register of mode MODE.
529 This pseudo is assigned the next sequential register number. */
532 gen_reg_rtx (mode)
533 enum machine_mode mode;
535 struct function *f = cfun;
536 register rtx val;
538 /* Don't let anything called after initial flow analysis create new
539 registers. */
540 if (no_new_pseudos)
541 abort ();
543 if (generating_concat_p
544 && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
545 || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
547 /* For complex modes, don't make a single pseudo.
548 Instead, make a CONCAT of two pseudos.
549 This allows noncontiguous allocation of the real and imaginary parts,
550 which makes much better code. Besides, allocating DCmode
551 pseudos overstrains reload on some machines like the 386. */
552 rtx realpart, imagpart;
553 int size = GET_MODE_UNIT_SIZE (mode);
554 enum machine_mode partmode
555 = mode_for_size (size * BITS_PER_UNIT,
556 (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
557 ? MODE_FLOAT : MODE_INT),
560 realpart = gen_reg_rtx (partmode);
561 imagpart = gen_reg_rtx (partmode);
562 return gen_rtx_CONCAT (mode, realpart, imagpart);
565 /* Make sure regno_pointer_align and regno_reg_rtx are large enough
566 to have an element for this pseudo reg number. */
568 if (reg_rtx_no == f->emit->regno_pointer_align_length)
570 int old_size = f->emit->regno_pointer_align_length;
571 rtx *new1;
572 char *new;
573 new = xrealloc (f->emit->regno_pointer_align, old_size * 2);
574 memset (new + old_size, 0, old_size);
575 f->emit->regno_pointer_align = (unsigned char *) new;
577 new1 = (rtx *) xrealloc (f->emit->x_regno_reg_rtx,
578 old_size * 2 * sizeof (rtx));
579 memset (new1 + old_size, 0, old_size * sizeof (rtx));
580 regno_reg_rtx = new1;
582 f->emit->regno_pointer_align_length = old_size * 2;
585 val = gen_raw_REG (mode, reg_rtx_no);
586 regno_reg_rtx[reg_rtx_no++] = val;
587 return val;
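/* Illustrative sketch, added commentary rather than original source:
   a scalar mode yields a single fresh pseudo, while a complex mode
   (with generating_concat_p set) yields a CONCAT of two part pseudos.

	gen_reg_rtx (SImode)
	    ==> (reg:SI N)                        for a fresh pseudo N
	gen_reg_rtx (DCmode)
	    ==> (concat:DC (reg:DF N+1) (reg:DF N+2))
*/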
590 /* Identify REG (which may be a CONCAT) as a user register. */
592 void
593 mark_user_reg (reg)
594 rtx reg;
596 if (GET_CODE (reg) == CONCAT)
598 REG_USERVAR_P (XEXP (reg, 0)) = 1;
599 REG_USERVAR_P (XEXP (reg, 1)) = 1;
601 else if (GET_CODE (reg) == REG)
602 REG_USERVAR_P (reg) = 1;
603 else
604 abort ();
607 /* Identify REG as a probable pointer register and show its alignment
608 as ALIGN, if nonzero. */
610 void
611 mark_reg_pointer (reg, align)
612 rtx reg;
613 int align;
615 if (! REG_POINTER (reg))
617 REG_POINTER (reg) = 1;
619 if (align)
620 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
622 else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
623 /* We can no longer be sure just how aligned this pointer is.  */
624 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
627 /* Return 1 plus largest pseudo reg number used in the current function. */
630 max_reg_num ()
632 return reg_rtx_no;
635 /* Return 1 + the largest label number used so far in the current function. */
638 max_label_num ()
640 if (last_label_num && label_num == base_label_num)
641 return last_label_num;
642 return label_num;
645 /* Return first label number used in this function (if any were used). */
648 get_first_label_num ()
650 return first_label_num;
653 /* Return a value representing some low-order bits of X, where the number
654 of low-order bits is given by MODE. Note that no conversion is done
655 between floating-point and fixed-point values, rather, the bit
656 representation is returned.
658 This function handles the cases in common between gen_lowpart, below,
659 and two variants in cse.c and combine.c. These are the cases that can
660 be safely handled at all points in the compilation.
662 If this is not a case we can handle, return 0. */
665 gen_lowpart_common (mode, x)
666 enum machine_mode mode;
667 register rtx x;
669 int word = 0;
671 if (GET_MODE (x) == mode)
672 return x;
674 /* MODE must occupy no more words than the mode of X. */
675 if (GET_MODE (x) != VOIDmode
676 && ((GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
677 > ((GET_MODE_SIZE (GET_MODE (x)) + (UNITS_PER_WORD - 1))
678 / UNITS_PER_WORD)))
679 return 0;
681 if (WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD)
682 word = ((GET_MODE_SIZE (GET_MODE (x))
683 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD))
684 / UNITS_PER_WORD);
686 if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
687 && (GET_MODE_CLASS (mode) == MODE_INT
688 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
690 /* If we are getting the low-order part of something that has been
691 sign- or zero-extended, we can either just use the object being
692 extended or make a narrower extension. If we want an even smaller
693 piece than the size of the object being extended, call ourselves
694 recursively.
696 This case is used mostly by combine and cse. */
698 if (GET_MODE (XEXP (x, 0)) == mode)
699 return XEXP (x, 0);
700 else if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
701 return gen_lowpart_common (mode, XEXP (x, 0));
702 else if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (x)))
703 return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
705 else if (GET_CODE (x) == SUBREG
706 && (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
707 || GET_MODE_SIZE (mode) <= UNITS_PER_WORD
708 || GET_MODE_SIZE (mode) == GET_MODE_UNIT_SIZE (GET_MODE (x))))
709 return (GET_MODE (SUBREG_REG (x)) == mode && SUBREG_WORD (x) == 0
710 ? SUBREG_REG (x)
711 : gen_rtx_SUBREG (mode, SUBREG_REG (x), SUBREG_WORD (x) + word));
712 else if (GET_CODE (x) == REG)
714 /* Let the backend decide how many registers to skip. This is needed
715 in particular for Sparc64 where fp regs are smaller than a word. */
716 /* ??? Note that subregs are now ambiguous, in that those against
717 pseudos are sized by the Word Size, while those against hard
718 regs are sized by the underlying register size. Better would be
719 to always interpret the subreg offset parameter as bytes or bits. */
721 if (WORDS_BIG_ENDIAN && REGNO (x) < FIRST_PSEUDO_REGISTER
722 && GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (mode))
723 word = (HARD_REGNO_NREGS (REGNO (x), GET_MODE (x))
724 - HARD_REGNO_NREGS (REGNO (x), mode));
726 /* If the register is not valid for MODE, return 0. If we don't
727 do this, there is no way to fix up the resulting REG later.
728 But we do do this if the current REG is not valid for its
729 mode. This latter is a kludge, but is required due to the
730 way that parameters are passed on some machines, most
731 notably Sparc. */
732 if (REGNO (x) < FIRST_PSEUDO_REGISTER
733 && ! HARD_REGNO_MODE_OK (REGNO (x) + word, mode)
734 && HARD_REGNO_MODE_OK (REGNO (x), GET_MODE (x)))
735 return 0;
736 else if (REGNO (x) < FIRST_PSEUDO_REGISTER
737 /* integrate.c can't handle parts of a return value register. */
738 && (! REG_FUNCTION_VALUE_P (x)
739 || ! rtx_equal_function_value_matters)
740 #ifdef CLASS_CANNOT_CHANGE_MODE
741 && ! (CLASS_CANNOT_CHANGE_MODE_P (mode, GET_MODE (x))
742 && GET_MODE_CLASS (GET_MODE (x)) != MODE_COMPLEX_INT
743 && GET_MODE_CLASS (GET_MODE (x)) != MODE_COMPLEX_FLOAT
744 && (TEST_HARD_REG_BIT
745 (reg_class_contents[(int) CLASS_CANNOT_CHANGE_MODE],
746 REGNO (x))))
747 #endif
748 /* We want to keep the stack, frame, and arg pointers
749 special. */
750 && x != frame_pointer_rtx
751 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
752 && x != arg_pointer_rtx
753 #endif
754 && x != stack_pointer_rtx)
755 return gen_rtx_REG (mode, REGNO (x) + word);
756 else
757 return gen_rtx_SUBREG (mode, x, word);
759 /* If X is a CONST_INT or a CONST_DOUBLE, extract the appropriate bits
760 from the low-order part of the constant. */
761 else if ((GET_MODE_CLASS (mode) == MODE_INT
762 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
763 && GET_MODE (x) == VOIDmode
764 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
766 /* If MODE is twice the host word size, X is already the desired
767 representation. Otherwise, if MODE is wider than a word, we can't
768 do this. If MODE is exactly a word, return just one CONST_INT. */
770 if (GET_MODE_BITSIZE (mode) >= 2 * HOST_BITS_PER_WIDE_INT)
771 return x;
772 else if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
773 return 0;
774 else if (GET_MODE_BITSIZE (mode) == HOST_BITS_PER_WIDE_INT)
775 return (GET_CODE (x) == CONST_INT ? x
776 : GEN_INT (CONST_DOUBLE_LOW (x)));
777 else
779 /* MODE must be narrower than HOST_BITS_PER_WIDE_INT. */
780 HOST_WIDE_INT val = (GET_CODE (x) == CONST_INT ? INTVAL (x)
781 : CONST_DOUBLE_LOW (x));
783 /* Sign extend to HOST_WIDE_INT. */
784 val = trunc_int_for_mode (val, mode);
786 return (GET_CODE (x) == CONST_INT && INTVAL (x) == val ? x
787 : GEN_INT (val));
791 #ifndef REAL_ARITHMETIC
792 /* If X is an integral constant but we want it in floating-point, it
793 must be the case that we have a union of an integer and a floating-point
794 value. If the machine-parameters allow it, simulate that union here
795 and return the result. The two-word and single-word cases are
796 different. */
798 else if (((HOST_FLOAT_FORMAT == TARGET_FLOAT_FORMAT
799 && HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
800 || flag_pretend_float)
801 && GET_MODE_CLASS (mode) == MODE_FLOAT
802 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
803 && GET_CODE (x) == CONST_INT
804 && sizeof (float) * HOST_BITS_PER_CHAR == HOST_BITS_PER_WIDE_INT)
806 union {HOST_WIDE_INT i; float d; } u;
808 u.i = INTVAL (x);
809 return CONST_DOUBLE_FROM_REAL_VALUE (u.d, mode);
811 else if (((HOST_FLOAT_FORMAT == TARGET_FLOAT_FORMAT
812 && HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
813 || flag_pretend_float)
814 && GET_MODE_CLASS (mode) == MODE_FLOAT
815 && GET_MODE_SIZE (mode) == 2 * UNITS_PER_WORD
816 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE)
817 && GET_MODE (x) == VOIDmode
818 && (sizeof (double) * HOST_BITS_PER_CHAR
819 == 2 * HOST_BITS_PER_WIDE_INT))
821 union {HOST_WIDE_INT i[2]; double d; } u;
822 HOST_WIDE_INT low, high;
824 if (GET_CODE (x) == CONST_INT)
825 low = INTVAL (x), high = low >> (HOST_BITS_PER_WIDE_INT -1);
826 else
827 low = CONST_DOUBLE_LOW (x), high = CONST_DOUBLE_HIGH (x);
829 #ifdef HOST_WORDS_BIG_ENDIAN
830 u.i[0] = high, u.i[1] = low;
831 #else
832 u.i[0] = low, u.i[1] = high;
833 #endif
835 return CONST_DOUBLE_FROM_REAL_VALUE (u.d, mode);
838 /* Similarly, if this is converting a floating-point value into a
839 single-word integer. Only do this if the host and target parameters are
840 compatible. */
842 else if (((HOST_FLOAT_FORMAT == TARGET_FLOAT_FORMAT
843 && HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
844 || flag_pretend_float)
845 && (GET_MODE_CLASS (mode) == MODE_INT
846 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
847 && GET_CODE (x) == CONST_DOUBLE
848 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
849 && GET_MODE_BITSIZE (mode) == BITS_PER_WORD)
850 return operand_subword (x, word, 0, GET_MODE (x));
852 /* Similarly, if this is converting a floating-point value into a
853 two-word integer, we can do this one word at a time and make an
854 integer. Only do this if the host and target parameters are
855 compatible. */
857 else if (((HOST_FLOAT_FORMAT == TARGET_FLOAT_FORMAT
858 && HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
859 || flag_pretend_float)
860 && (GET_MODE_CLASS (mode) == MODE_INT
861 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
862 && GET_CODE (x) == CONST_DOUBLE
863 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
864 && GET_MODE_BITSIZE (mode) == 2 * BITS_PER_WORD)
866 rtx lowpart
867 = operand_subword (x, word + WORDS_BIG_ENDIAN, 0, GET_MODE (x));
868 rtx highpart
869 = operand_subword (x, word + ! WORDS_BIG_ENDIAN, 0, GET_MODE (x));
871 if (lowpart && GET_CODE (lowpart) == CONST_INT
872 && highpart && GET_CODE (highpart) == CONST_INT)
873 return immed_double_const (INTVAL (lowpart), INTVAL (highpart), mode);
875 #else /* ifndef REAL_ARITHMETIC */
877 /* When we have a FP emulator, we can handle all conversions between
878 FP and integer operands. This simplifies reload because it
879 doesn't have to deal with constructs like (subreg:DI
880 (const_double:SF ...)) or (subreg:DF (const_int ...)). */
882 else if (mode == SFmode
883 && GET_CODE (x) == CONST_INT)
885 REAL_VALUE_TYPE r;
886 HOST_WIDE_INT i;
888 i = INTVAL (x);
889 r = REAL_VALUE_FROM_TARGET_SINGLE (i);
890 return CONST_DOUBLE_FROM_REAL_VALUE (r, mode);
892 else if (mode == DFmode
893 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE)
894 && GET_MODE (x) == VOIDmode)
896 REAL_VALUE_TYPE r;
897 HOST_WIDE_INT i[2];
898 HOST_WIDE_INT low, high;
900 if (GET_CODE (x) == CONST_INT)
902 low = INTVAL (x);
903 high = low >> (HOST_BITS_PER_WIDE_INT - 1);
905 else
907 low = CONST_DOUBLE_LOW (x);
908 high = CONST_DOUBLE_HIGH (x);
911 /* REAL_VALUE_TARGET_DOUBLE takes the addressing order of the
912 target machine. */
913 if (WORDS_BIG_ENDIAN)
914 i[0] = high, i[1] = low;
915 else
916 i[0] = low, i[1] = high;
918 r = REAL_VALUE_FROM_TARGET_DOUBLE (i);
919 return CONST_DOUBLE_FROM_REAL_VALUE (r, mode);
921 else if ((GET_MODE_CLASS (mode) == MODE_INT
922 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
923 && GET_CODE (x) == CONST_DOUBLE
924 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
926 REAL_VALUE_TYPE r;
927 long i[4]; /* Only the low 32 bits of each 'long' are used. */
928 int endian = WORDS_BIG_ENDIAN ? 1 : 0;
930 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
931 switch (GET_MODE (x))
933 case SFmode:
934 REAL_VALUE_TO_TARGET_SINGLE (r, i[endian]);
935 i[1 - endian] = 0;
936 break;
937 case DFmode:
938 REAL_VALUE_TO_TARGET_DOUBLE (r, i);
939 break;
940 #if LONG_DOUBLE_TYPE_SIZE == 96
941 case XFmode:
942 REAL_VALUE_TO_TARGET_LONG_DOUBLE (r, i + endian);
943 i[3-3*endian] = 0;
944 #else
945 case TFmode:
946 REAL_VALUE_TO_TARGET_LONG_DOUBLE (r, i);
947 #endif
948 break;
949 default:
950 abort ();
953 /* Now, pack the 32-bit elements of the array into a CONST_DOUBLE
954 and return it. */
955 #if HOST_BITS_PER_WIDE_INT == 32
956 return immed_double_const (i[endian], i[1 - endian], mode);
957 #else
959 int c;
961 if (HOST_BITS_PER_WIDE_INT != 64)
962 abort ();
964 for (c = 0; c < 4; c++)
965 i[c] &= ~ (0L);
967 switch (GET_MODE (x))
969 case SFmode:
970 case DFmode:
971 return immed_double_const (((unsigned long) i[endian]) |
972 (((HOST_WIDE_INT) i[1-endian]) << 32),
973 0, mode);
974 default:
975 return immed_double_const (((unsigned long) i[endian*3]) |
976 (((HOST_WIDE_INT) i[1+endian]) << 32),
977 ((unsigned long) i[2-endian]) |
978 (((HOST_WIDE_INT) i[3-endian*3]) << 32),
979 mode);
982 #endif
984 #endif /* ifndef REAL_ARITHMETIC */
986 /* Otherwise, we can't do this. */
987 return 0;
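/* Illustrative sketch, added commentary rather than original source:
   for integer constants the low-order bits are extracted and then
   sign-extended for MODE by trunc_int_for_mode.

	gen_lowpart_common (QImode, GEN_INT (0x1234))
	    ==> (const_int 52)                    the low byte 0x34
	gen_lowpart_common (QImode, GEN_INT (0x12f0))
	    ==> (const_int -16)                   0xf0 sign-extended
*/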
990 /* Return the real part (which has mode MODE) of a complex value X.
991 This always comes at the low address in memory. */
994 gen_realpart (mode, x)
995 enum machine_mode mode;
996 register rtx x;
998 if (GET_CODE (x) == CONCAT && GET_MODE (XEXP (x, 0)) == mode)
999 return XEXP (x, 0);
1000 else if (WORDS_BIG_ENDIAN
1001 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
1002 && REG_P (x)
1003 && REGNO (x) < FIRST_PSEUDO_REGISTER)
1004 internal_error
1005 ("Can't access real part of complex value in hard register");
1006 else if (WORDS_BIG_ENDIAN)
1007 return gen_highpart (mode, x);
1008 else
1009 return gen_lowpart (mode, x);
1012 /* Return the imaginary part (which has mode MODE) of a complex value X.
1013 This always comes at the high address in memory. */
1016 gen_imagpart (mode, x)
1017 enum machine_mode mode;
1018 register rtx x;
1020 if (GET_CODE (x) == CONCAT && GET_MODE (XEXP (x, 0)) == mode)
1021 return XEXP (x, 1);
1022 else if (WORDS_BIG_ENDIAN)
1023 return gen_lowpart (mode, x);
1024 else if (!WORDS_BIG_ENDIAN
1025 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
1026 && REG_P (x)
1027 && REGNO (x) < FIRST_PSEUDO_REGISTER)
1028 internal_error
1029 ("can't access imaginary part of complex value in hard register");
1030 else
1031 return gen_highpart (mode, x);
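/* Illustrative sketch, added commentary rather than original source:
   applied to a CONCAT made by gen_reg_rtx for a complex mode, these
   simply return the two halves.

	rtx c = gen_reg_rtx (DCmode);         (concat:DC (reg:DF R) (reg:DF I))
	gen_realpart (DFmode, c)   ==> (reg:DF R)
	gen_imagpart (DFmode, c)   ==> (reg:DF I)
*/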
1034 /* Return 1 iff X, assumed to be a SUBREG,
1035 refers to the real part of the complex value in its containing reg.
1036 Complex values are always stored with the real part in the first word,
1037 regardless of WORDS_BIG_ENDIAN. */
1040 subreg_realpart_p (x)
1041 rtx x;
1043 if (GET_CODE (x) != SUBREG)
1044 abort ();
1046 return ((unsigned int) SUBREG_WORD (x) * UNITS_PER_WORD
1047 < GET_MODE_UNIT_SIZE (GET_MODE (SUBREG_REG (x))));
1050 /* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a value,
1051 return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
1052 least-significant part of X.
1053 MODE specifies how big a part of X to return;
1054 it usually should not be larger than a word.
1055 If X is a MEM whose address is a QUEUED, the value may be so also. */
1058 gen_lowpart (mode, x)
1059 enum machine_mode mode;
1060 register rtx x;
1062 rtx result = gen_lowpart_common (mode, x);
1064 if (result)
1065 return result;
1066 else if (GET_CODE (x) == REG)
1068 /* Must be a hard reg that's not valid in MODE. */
1069 result = gen_lowpart_common (mode, copy_to_reg (x));
1070 if (result == 0)
1071 abort ();
1072 return result;
1074 else if (GET_CODE (x) == MEM)
1076 /* The only additional case we can do is MEM. */
1077 register int offset = 0;
1078 if (WORDS_BIG_ENDIAN)
1079 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
1080 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
1082 if (BYTES_BIG_ENDIAN)
1083 /* Adjust the address so that the address-after-the-data
1084 is unchanged. */
1085 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
1086 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
1088 return change_address (x, mode, plus_constant (XEXP (x, 0), offset));
1090 else if (GET_CODE (x) == ADDRESSOF)
1091 return gen_lowpart (mode, force_reg (GET_MODE (x), x));
1092 else
1093 abort ();
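/* Illustrative sketch, added commentary rather than original source,
   assuming a 32-bit-word target: a multi-word pseudo gives a SUBREG
   selecting the low-order word, while a MEM has its address adjusted.

	rtx r = gen_reg_rtx (DImode);
	gen_lowpart (SImode, r)
	    ==> (subreg:SI (reg:DI N) 0)          word 1 if WORDS_BIG_ENDIAN
	gen_lowpart (SImode, m)                   where m is (mem:DI addr)
	    ==> (mem:SI addr')                    addr' is offset by 4 bytes
	                                          only on a big-endian target
*/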
1096 /* Like `gen_lowpart', but refer to the most significant part.
1097 This is used to access the imaginary part of a complex number. */
1100 gen_highpart (mode, x)
1101 enum machine_mode mode;
1102 register rtx x;
1104 /* This case loses if X is a subreg. To catch bugs early,
1105 complain if an invalid MODE is used even in other cases. */
1106 if (GET_MODE_SIZE (mode) > UNITS_PER_WORD
1107 && GET_MODE_SIZE (mode) != GET_MODE_UNIT_SIZE (GET_MODE (x)))
1108 abort ();
1109 if (GET_CODE (x) == CONST_DOUBLE
1110 #if !(TARGET_FLOAT_FORMAT != HOST_FLOAT_FORMAT || defined (REAL_IS_NOT_DOUBLE))
1111 && GET_MODE_CLASS (GET_MODE (x)) != MODE_FLOAT
1112 #endif
1114 return GEN_INT (CONST_DOUBLE_HIGH (x) & GET_MODE_MASK (mode));
1115 else if (GET_CODE (x) == CONST_INT)
1117 if (HOST_BITS_PER_WIDE_INT <= BITS_PER_WORD)
1118 return const0_rtx;
1119 return GEN_INT (INTVAL (x) >> (HOST_BITS_PER_WIDE_INT - BITS_PER_WORD));
1121 else if (GET_CODE (x) == MEM)
1123 register int offset = 0;
1124 if (! WORDS_BIG_ENDIAN)
1125 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
1126 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
1128 if (! BYTES_BIG_ENDIAN
1129 && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
1130 offset -= (GET_MODE_SIZE (mode)
1131 - MIN (UNITS_PER_WORD,
1132 GET_MODE_SIZE (GET_MODE (x))));
1134 return change_address (x, mode, plus_constant (XEXP (x, 0), offset));
1136 else if (GET_CODE (x) == SUBREG)
1138 /* The only time this should occur is when we are looking at a
1139 multi-word item with a SUBREG whose mode is the same as that of the
1140 item. It isn't clear what we would do if it wasn't. */
1141 if (SUBREG_WORD (x) != 0)
1142 abort ();
1143 return gen_highpart (mode, SUBREG_REG (x));
1145 else if (GET_CODE (x) == REG)
1147 int word;
1149 /* Let the backend decide how many registers to skip. This is needed
1150 in particular for sparc64 where fp regs are smaller than a word. */
1151 /* ??? Note that subregs are now ambiguous, in that those against
1152 pseudos are sized by the word size, while those against hard
1153 regs are sized by the underlying register size. Better would be
1154 to always interpret the subreg offset parameter as bytes or bits. */
1156 if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode))
1157 abort ();
1158 else if (WORDS_BIG_ENDIAN)
1159 word = 0;
1160 else if (REGNO (x) < FIRST_PSEUDO_REGISTER)
1161 word = (HARD_REGNO_NREGS (REGNO (x), GET_MODE (x))
1162 - HARD_REGNO_NREGS (REGNO (x), mode));
1163 else
1164 word = ((GET_MODE_SIZE (GET_MODE (x))
1165 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD))
1166 / UNITS_PER_WORD);
1168 if (REGNO (x) < FIRST_PSEUDO_REGISTER
1169 /* integrate.c can't handle parts of a return value register. */
1170 && (! REG_FUNCTION_VALUE_P (x)
1171 || ! rtx_equal_function_value_matters)
1172 /* We want to keep the stack, frame, and arg pointers special. */
1173 && x != frame_pointer_rtx
1174 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1175 && x != arg_pointer_rtx
1176 #endif
1177 && x != stack_pointer_rtx)
1178 return gen_rtx_REG (mode, REGNO (x) + word);
1179 else
1180 return gen_rtx_SUBREG (mode, x, word);
1182 else
1183 abort ();
1186 /* Return 1 iff X, assumed to be a SUBREG,
1187 refers to the least significant part of its containing reg.
1188 If X is not a SUBREG, always return 1 (it is its own low part!). */
1191 subreg_lowpart_p (x)
1192 rtx x;
1194 if (GET_CODE (x) != SUBREG)
1195 return 1;
1196 else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
1197 return 0;
1199 if (WORDS_BIG_ENDIAN
1200 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) > UNITS_PER_WORD)
1201 return (SUBREG_WORD (x)
1202 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
1203 - MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD))
1204 / UNITS_PER_WORD));
1206 return SUBREG_WORD (x) == 0;
1209 /* Return subword I of operand OP.
1210 The word number, I, is interpreted as the word number starting at the
1211 low-order address. Word 0 is the low-order word if not WORDS_BIG_ENDIAN,
1212 otherwise it is the high-order word.
1214 If we cannot extract the required word, we return zero. Otherwise, an
1215 rtx corresponding to the requested word will be returned.
1217 VALIDATE_ADDRESS is nonzero if the address should be validated. Before
1218 reload has completed, a valid address will always be returned. After
1219 reload, if a valid address cannot be returned, we return zero.
1221 If VALIDATE_ADDRESS is zero, we simply form the required address; validating
1222 it is the responsibility of the caller.
1224 MODE is the mode of OP in case it is a CONST_INT. */
1227 operand_subword (op, i, validate_address, mode)
1228 rtx op;
1229 unsigned int i;
1230 int validate_address;
1231 enum machine_mode mode;
1233 HOST_WIDE_INT val;
1234 int size_ratio = HOST_BITS_PER_WIDE_INT / BITS_PER_WORD;
1236 if (mode == VOIDmode)
1237 mode = GET_MODE (op);
1239 if (mode == VOIDmode)
1240 abort ();
1242 /* If OP is narrower than a word, fail. */
1243 if (mode != BLKmode
1244 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
1245 return 0;
1247 /* If we want a word outside OP, return zero. */
1248 if (mode != BLKmode
1249 && (i + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
1250 return const0_rtx;
1252 /* If OP is already an integer word, return it. */
1253 if (GET_MODE_CLASS (mode) == MODE_INT
1254 && GET_MODE_SIZE (mode) == UNITS_PER_WORD)
1255 return op;
1257 /* If OP is a REG or SUBREG, we can handle it very simply. */
1258 if (GET_CODE (op) == REG)
1260 /* ??? There is a potential problem with this code. It does not
1261 properly handle extractions of a subword from a hard register
1262 that is larger than word_mode. Presumably the check for
1263 HARD_REGNO_MODE_OK catches most of these cases. */
1265 /* If OP is a hard register, but OP + I is not a hard register,
1266 then extracting a subword is impossible.
1268 For example, consider if OP is the last hard register and it is
1269 larger than word_mode. If we wanted word N (for N > 0) because a
1270 part of that hard register was known to contain a useful value,
1271 then OP + I would refer to a pseudo, not the hard register we
1272 actually wanted. */
1273 if (REGNO (op) < FIRST_PSEUDO_REGISTER
1274 && REGNO (op) + i >= FIRST_PSEUDO_REGISTER)
1275 return 0;
1277 /* If the register is not valid for MODE, return 0. Note we
1278 have to check both OP and OP + I since they may refer to
1279 different parts of the register file.
1281 Consider if OP refers to the last 96bit FP register and we want
1282 subword 3 because that subword is known to contain a value we
1283 needed. */
1284 if (REGNO (op) < FIRST_PSEUDO_REGISTER
1285 && (! HARD_REGNO_MODE_OK (REGNO (op), word_mode)
1286 || ! HARD_REGNO_MODE_OK (REGNO (op) + i, word_mode)))
1287 return 0;
1288 else if (REGNO (op) >= FIRST_PSEUDO_REGISTER
1289 || (REG_FUNCTION_VALUE_P (op)
1290 && rtx_equal_function_value_matters)
1291 /* We want to keep the stack, frame, and arg pointers
1292 special. */
1293 || op == frame_pointer_rtx
1294 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1295 || op == arg_pointer_rtx
1296 #endif
1297 || op == stack_pointer_rtx)
1298 return gen_rtx_SUBREG (word_mode, op, i);
1299 else
1300 return gen_rtx_REG (word_mode, REGNO (op) + i);
1302 else if (GET_CODE (op) == SUBREG)
1303 return gen_rtx_SUBREG (word_mode, SUBREG_REG (op), i + SUBREG_WORD (op));
1304 else if (GET_CODE (op) == CONCAT)
1306 unsigned int partwords
1307 = GET_MODE_UNIT_SIZE (GET_MODE (op)) / UNITS_PER_WORD;
1309 if (i < partwords)
1310 return operand_subword (XEXP (op, 0), i, validate_address, mode);
1311 return operand_subword (XEXP (op, 1), i - partwords,
1312 validate_address, mode);
1315 /* Form a new MEM at the requested address. */
1316 if (GET_CODE (op) == MEM)
1318 rtx addr = plus_constant (XEXP (op, 0), i * UNITS_PER_WORD);
1319 rtx new;
1321 if (validate_address)
1323 if (reload_completed)
1325 if (! strict_memory_address_p (word_mode, addr))
1326 return 0;
1328 else
1329 addr = memory_address (word_mode, addr);
1332 new = gen_rtx_MEM (word_mode, addr);
1333 MEM_COPY_ATTRIBUTES (new, op);
1334 return new;
1337 /* The only remaining cases are when OP is a constant. If the host and
1338 target floating formats are the same, handling two-word floating
1339 constants is easy. Note that REAL_VALUE_TO_TARGET_{SINGLE,DOUBLE}
1340 are defined as returning one or two 32 bit values, respectively,
1341 and not values of BITS_PER_WORD bits. */
1342 #ifdef REAL_ARITHMETIC
1343 /* The output is some bits, the width of the target machine's word.
1344 A wider-word host can surely hold them in a CONST_INT. A narrower-word
1345 host can't. */
1346 if (HOST_BITS_PER_WIDE_INT >= BITS_PER_WORD
1347 && GET_MODE_CLASS (mode) == MODE_FLOAT
1348 && GET_MODE_BITSIZE (mode) == 64
1349 && GET_CODE (op) == CONST_DOUBLE)
1351 long k[2];
1352 REAL_VALUE_TYPE rv;
1354 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1355 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
1357 /* We handle 32-bit and >= 64-bit words here. Note that the order in
1358 which the words are written depends on the word endianness.
1359 ??? This is a potential portability problem and should
1360 be fixed at some point.
1362 We must exercise caution with the sign bit. By definition there
1363 are 32 significant bits in K; there may be more in a HOST_WIDE_INT.
1364 Consider a host with a 32-bit long and a 64-bit HOST_WIDE_INT.
1365 So we explicitly mask and sign-extend as necessary. */
1366 if (BITS_PER_WORD == 32)
1368 val = k[i];
1369 val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;
1370 return GEN_INT (val);
1372 #if HOST_BITS_PER_WIDE_INT >= 64
1373 else if (BITS_PER_WORD >= 64 && i == 0)
1375 val = k[! WORDS_BIG_ENDIAN];
1376 val = (((val & 0xffffffff) ^ 0x80000000) - 0x80000000) << 32;
1377 val |= (HOST_WIDE_INT) k[WORDS_BIG_ENDIAN] & 0xffffffff;
1378 return GEN_INT (val);
1380 #endif
1381 else if (BITS_PER_WORD == 16)
1383 val = k[i >> 1];
1384 if ((i & 1) == !WORDS_BIG_ENDIAN)
1385 val >>= 16;
1386 val &= 0xffff;
1387 return GEN_INT (val);
1389 else
1390 abort ();
1392 else if (HOST_BITS_PER_WIDE_INT >= BITS_PER_WORD
1393 && GET_MODE_CLASS (mode) == MODE_FLOAT
1394 && GET_MODE_BITSIZE (mode) > 64
1395 && GET_CODE (op) == CONST_DOUBLE)
1397 long k[4];
1398 REAL_VALUE_TYPE rv;
1400 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1401 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
1403 if (BITS_PER_WORD == 32)
1405 val = k[i];
1406 val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;
1407 return GEN_INT (val);
1409 #if HOST_BITS_PER_WIDE_INT >= 64
1410 else if (BITS_PER_WORD >= 64 && i <= 1)
1412 val = k[i*2 + ! WORDS_BIG_ENDIAN];
1413 val = (((val & 0xffffffff) ^ 0x80000000) - 0x80000000) << 32;
1414 val |= (HOST_WIDE_INT) k[i*2 + WORDS_BIG_ENDIAN] & 0xffffffff;
1415 return GEN_INT (val);
1417 #endif
1418 else
1419 abort ();
1421 #else /* no REAL_ARITHMETIC */
1422 if (((HOST_FLOAT_FORMAT == TARGET_FLOAT_FORMAT
1423 && HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
1424 || flag_pretend_float)
1425 && GET_MODE_CLASS (mode) == MODE_FLOAT
1426 && GET_MODE_SIZE (mode) == 2 * UNITS_PER_WORD
1427 && GET_CODE (op) == CONST_DOUBLE)
1429 /* The constant is stored in the host's word-ordering,
1430 but we want to access it in the target's word-ordering. Some
1431 compilers don't like a conditional inside macro args, so we have two
1432 copies of the return. */
1433 #ifdef HOST_WORDS_BIG_ENDIAN
1434 return GEN_INT (i == WORDS_BIG_ENDIAN
1435 ? CONST_DOUBLE_HIGH (op) : CONST_DOUBLE_LOW (op));
1436 #else
1437 return GEN_INT (i != WORDS_BIG_ENDIAN
1438 ? CONST_DOUBLE_HIGH (op) : CONST_DOUBLE_LOW (op));
1439 #endif
1441 #endif /* no REAL_ARITHMETIC */
1443 /* Single word float is a little harder, since single- and double-word
1444 values often do not have the same high-order bits. We have already
1445 verified that we want the only defined word of the single-word value. */
1446 #ifdef REAL_ARITHMETIC
1447 if (GET_MODE_CLASS (mode) == MODE_FLOAT
1448 && GET_MODE_BITSIZE (mode) == 32
1449 && GET_CODE (op) == CONST_DOUBLE)
1451 long l;
1452 REAL_VALUE_TYPE rv;
1454 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1455 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1457 /* Sign extend from known 32-bit value to HOST_WIDE_INT. */
1458 val = l;
1459 val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;
1461 if (BITS_PER_WORD == 16)
1463 if ((i & 1) == !WORDS_BIG_ENDIAN)
1464 val >>= 16;
1465 val &= 0xffff;
1468 return GEN_INT (val);
1470 #else
1471 if (((HOST_FLOAT_FORMAT == TARGET_FLOAT_FORMAT
1472 && HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
1473 || flag_pretend_float)
1474 && sizeof (float) * 8 == HOST_BITS_PER_WIDE_INT
1475 && GET_MODE_CLASS (mode) == MODE_FLOAT
1476 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
1477 && GET_CODE (op) == CONST_DOUBLE)
1479 double d;
1480 union {float f; HOST_WIDE_INT i; } u;
1482 REAL_VALUE_FROM_CONST_DOUBLE (d, op);
1484 u.f = d;
1485 return GEN_INT (u.i);
1487 if (((HOST_FLOAT_FORMAT == TARGET_FLOAT_FORMAT
1488 && HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
1489 || flag_pretend_float)
1490 && sizeof (double) * 8 == HOST_BITS_PER_WIDE_INT
1491 && GET_MODE_CLASS (mode) == MODE_FLOAT
1492 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
1493 && GET_CODE (op) == CONST_DOUBLE)
1495 double d;
1496 union {double d; HOST_WIDE_INT i; } u;
1498 REAL_VALUE_FROM_CONST_DOUBLE (d, op);
1500 u.d = d;
1501 return GEN_INT (u.i);
1503 #endif /* no REAL_ARITHMETIC */
1505 /* The only remaining cases that we can handle are integers.
1506 Convert to proper endianness now since these cases need it.
1507 At this point, i == 0 means the low-order word.
1509 We do not want to handle the case when BITS_PER_WORD <= HOST_BITS_PER_INT
1510 in general. However, if OP is (const_int 0), we can just return
1511 it for any word. */
1513 if (op == const0_rtx)
1514 return op;
1516 if (GET_MODE_CLASS (mode) != MODE_INT
1517 || (GET_CODE (op) != CONST_INT && GET_CODE (op) != CONST_DOUBLE)
1518 || BITS_PER_WORD > HOST_BITS_PER_WIDE_INT)
1519 return 0;
1521 if (WORDS_BIG_ENDIAN)
1522 i = GET_MODE_SIZE (mode) / UNITS_PER_WORD - 1 - i;
1524 /* Find out which word on the host machine this value is in and get
1525 it from the constant. */
1526 val = (i / size_ratio == 0
1527 ? (GET_CODE (op) == CONST_INT ? INTVAL (op) : CONST_DOUBLE_LOW (op))
1528 : (GET_CODE (op) == CONST_INT
1529 ? (INTVAL (op) < 0 ? ~0 : 0) : CONST_DOUBLE_HIGH (op)));
1531 /* Get the value we want into the low bits of val. */
1532 if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT)
1533 val = ((val >> ((i % size_ratio) * BITS_PER_WORD)));
1535 val = trunc_int_for_mode (val, word_mode);
1537 return GEN_INT (val);
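/* Illustrative sketch, added commentary rather than original source,
   assuming a 32-bit-word target, a DImode pseudo R and a DImode MEM M
   (the register numbers and operands here are arbitrary).

	operand_subword (R, 1, 0, DImode)
	    ==> (subreg:SI (reg:DI R) 1)
	operand_subword (M, 1, 0, DImode)
	    ==> (mem:SI (plus:SI addr (const_int 4)))
	operand_subword (const0_rtx, 1, 0, DImode)
	    ==> (const_int 0)                 const0_rtx works for any word
*/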
1540 /* Similar to `operand_subword', but never return 0. If we can't extract
1541 the required subword, put OP into a register and try again. If that fails,
1542 abort. We always validate the address in this case. It is not valid
1543 to call this function after reload; it is mostly meant for RTL
1544 generation.
1546 MODE is the mode of OP, in case it is CONST_INT. */
1549 operand_subword_force (op, i, mode)
1550 rtx op;
1551 unsigned int i;
1552 enum machine_mode mode;
1554 rtx result = operand_subword (op, i, 1, mode);
1556 if (result)
1557 return result;
1559 if (mode != BLKmode && mode != VOIDmode)
1561 /* If this is a register which can not be accessed by words, copy it
1562 to a pseudo register. */
1563 if (GET_CODE (op) == REG)
1564 op = copy_to_reg (op);
1565 else
1566 op = force_reg (mode, op);
1569 result = operand_subword (op, i, 1, mode);
1570 if (result == 0)
1571 abort ();
1573 return result;
1576 /* Given a compare instruction, swap the operands.
1577 A test instruction is changed into a compare of 0 against the operand. */
1579 void
1580 reverse_comparison (insn)
1581 rtx insn;
1583 rtx body = PATTERN (insn);
1584 rtx comp;
1586 if (GET_CODE (body) == SET)
1587 comp = SET_SRC (body);
1588 else
1589 comp = SET_SRC (XVECEXP (body, 0, 0));
1591 if (GET_CODE (comp) == COMPARE)
1593 rtx op0 = XEXP (comp, 0);
1594 rtx op1 = XEXP (comp, 1);
1595 XEXP (comp, 0) = op1;
1596 XEXP (comp, 1) = op0;
1598 else
1600 rtx new = gen_rtx_COMPARE (VOIDmode,
1601 CONST0_RTX (GET_MODE (comp)), comp);
1602 if (GET_CODE (body) == SET)
1603 SET_SRC (body) = new;
1604 else
1605 SET_SRC (XVECEXP (body, 0, 0)) = new;
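/* Illustrative sketch, added commentary rather than original source:
   the insn's pattern is rewritten in place (pseudo numbers arbitrary).

	(set (cc0) (compare (reg:SI 65) (reg:SI 66)))
	    ==> (set (cc0) (compare (reg:SI 66) (reg:SI 65)))
	(set (cc0) (reg:SI 65))
	    ==> (set (cc0) (compare (const_int 0) (reg:SI 65)))
*/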
1609 /* Return a memory reference like MEMREF, but with its mode changed
1610 to MODE and its address changed to ADDR.
1611 (VOIDmode means don't change the mode.
1612 NULL for ADDR means don't change the address.) */
1615 change_address (memref, mode, addr)
1616 rtx memref;
1617 enum machine_mode mode;
1618 rtx addr;
1620 rtx new;
1622 if (GET_CODE (memref) != MEM)
1623 abort ();
1624 if (mode == VOIDmode)
1625 mode = GET_MODE (memref);
1626 if (addr == 0)
1627 addr = XEXP (memref, 0);
1629 /* If reload is in progress or has completed, ADDR must be valid.
1630 Otherwise, we can call memory_address to make it valid. */
1631 if (reload_completed || reload_in_progress)
1633 if (! memory_address_p (mode, addr))
1634 abort ();
1636 else
1637 addr = memory_address (mode, addr);
1639 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
1640 return memref;
1642 new = gen_rtx_MEM (mode, addr);
1643 MEM_COPY_ATTRIBUTES (new, memref);
1644 return new;
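/* Illustrative sketch, added commentary rather than original source:
   narrowing the mode of a MEM while keeping its address.

	rtx m = gen_rtx_MEM (SImode, addr);
	rtx q = change_address (m, QImode, NULL_RTX);
	    ==> (mem:QI addr)                 MEM attributes copied from m

   Passing VOIDmode keeps the mode and a null ADDR keeps the address;
   before reload the address is legitimized with memory_address.  */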
1647 /* Return a newly created CODE_LABEL rtx with a unique label number. */
1650 gen_label_rtx ()
1652 register rtx label;
1654 label = gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX,
1655 NULL_RTX, label_num++, NULL_PTR, NULL_PTR);
1657 LABEL_NUSES (label) = 0;
1658 LABEL_ALTERNATE_NAME (label) = NULL;
1659 return label;
1662 /* For procedure integration. */
1664 /* Install new pointers to the first and last insns in the chain.
1665 Also, set cur_insn_uid to one higher than the last in use.
1666 Used for an inline-procedure after copying the insn chain. */
1668 void
1669 set_new_first_and_last_insn (first, last)
1670 rtx first, last;
1672 rtx insn;
1674 first_insn = first;
1675 last_insn = last;
1676 cur_insn_uid = 0;
1678 for (insn = first; insn; insn = NEXT_INSN (insn))
1679 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
1681 cur_insn_uid++;
1684 /* Set the range of label numbers found in the current function.
1685 This is used when belatedly compiling an inline function. */
1687 void
1688 set_new_first_and_last_label_num (first, last)
1689 int first, last;
1691 base_label_num = label_num;
1692 first_label_num = first;
1693 last_label_num = last;
1696 /* Set the last label number found in the current function.
1697 This is used when belatedly compiling an inline function. */
1699 void
1700 set_new_last_label_num (last)
1701 int last;
1703 base_label_num = label_num;
1704 last_label_num = last;
1707 /* Restore all variables describing the current status from the structure *P.
1708 This is used after a nested function. */
1710 void
1711 restore_emit_status (p)
1712 struct function *p ATTRIBUTE_UNUSED;
1714 last_label_num = 0;
1715 clear_emit_caches ();
1718 /* Clear out all parts of the state in F that can safely be discarded
1719 after the function has been compiled, to let garbage collection
1720 reclaim the memory. */
1722 void
1723 free_emit_status (f)
1724 struct function *f;
1726 free (f->emit->x_regno_reg_rtx);
1727 free (f->emit->regno_pointer_align);
1728 free (f->emit);
1729 f->emit = NULL;
1732 /* Go through all the RTL insn bodies and copy any invalid shared
1733 structure. This routine should only be called once. */
1735 void
1736 unshare_all_rtl (fndecl, insn)
1737 tree fndecl;
1738 rtx insn;
1740 tree decl;
1742 /* Make sure that virtual parameters are not shared. */
1743 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
1744 SET_DECL_RTL (decl, copy_rtx_if_shared (DECL_RTL (decl)));
1746 /* Make sure that virtual stack slots are not shared. */
1747 unshare_all_decls (DECL_INITIAL (fndecl));
1749 /* Unshare just about everything else. */
1750 unshare_all_rtl_1 (insn);
1752 /* Make sure the addresses of stack slots found outside the insn chain
1753 (such as, in DECL_RTL of a variable) are not shared
1754 with the insn chain.
1756 This special care is necessary when the stack slot MEM does not
1757 actually appear in the insn chain. If it does appear, its address
1758 is unshared from all else at that point. */
1759 stack_slot_list = copy_rtx_if_shared (stack_slot_list);
1762 /* Go through all the RTL insn bodies and copy any invalid shared
1763 structure, again. This is a fairly expensive thing to do so it
1764 should be done sparingly. */
1766 void
1767 unshare_all_rtl_again (insn)
1768 rtx insn;
1770 rtx p;
1771 tree decl;
1773 for (p = insn; p; p = NEXT_INSN (p))
1774 if (INSN_P (p))
1776 reset_used_flags (PATTERN (p));
1777 reset_used_flags (REG_NOTES (p));
1778 reset_used_flags (LOG_LINKS (p));
1781 /* Make sure that virtual stack slots are not shared. */
1782 reset_used_decls (DECL_INITIAL (cfun->decl));
1784 /* Make sure that virtual parameters are not shared. */
1785 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = TREE_CHAIN (decl))
1786 reset_used_flags (DECL_RTL (decl));
1788 reset_used_flags (stack_slot_list);
1790 unshare_all_rtl (cfun->decl, insn);
1793 /* Go through all the RTL insn bodies and copy any invalid shared structure.
1794 Assumes the mark bits are cleared at entry. */
1796 static void
1797 unshare_all_rtl_1 (insn)
1798 rtx insn;
1800 for (; insn; insn = NEXT_INSN (insn))
1801 if (INSN_P (insn))
1803 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
1804 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
1805 LOG_LINKS (insn) = copy_rtx_if_shared (LOG_LINKS (insn));
1809 /* Go through all virtual stack slots of a function and copy any
1810 shared structure. */
1811 static void
1812 unshare_all_decls (blk)
1813 tree blk;
1815 tree t;
1817 /* Copy shared decls. */
1818 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
1819 if (DECL_RTL_SET_P (t))
1820 SET_DECL_RTL (t, copy_rtx_if_shared (DECL_RTL (t)));
1822 /* Now process sub-blocks. */
1823 for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
1824 unshare_all_decls (t);
1827 /* Go through all virtual stack slots of a function and mark them as
1828 not shared. */
1829 static void
1830 reset_used_decls (blk)
1831 tree blk;
1833 tree t;
1835 /* Mark decls. */
1836 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
1837 if (DECL_RTL_SET_P (t))
1838 reset_used_flags (DECL_RTL (t));
1840 /* Now process sub-blocks. */
1841 for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
1842 reset_used_decls (t);
1845 /* Mark ORIG as in use, and return a copy of it if it was already in use.
1846 Recursively does the same for subexpressions. */
1849 copy_rtx_if_shared (orig)
1850 rtx orig;
1852 register rtx x = orig;
1853 register int i;
1854 register enum rtx_code code;
1855 register const char *format_ptr;
1856 int copied = 0;
1858 if (x == 0)
1859 return 0;
1861 code = GET_CODE (x);
1863 /* These types may be freely shared. */
1865 switch (code)
1867 case REG:
1868 case QUEUED:
1869 case CONST_INT:
1870 case CONST_DOUBLE:
1871 case SYMBOL_REF:
1872 case CODE_LABEL:
1873 case PC:
1874 case CC0:
1875 case SCRATCH:
1876 /* SCRATCH must be shared because they represent distinct values. */
1877 return x;
1879 case CONST:
1880 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
1881 a LABEL_REF, it isn't sharable. */
1882 if (GET_CODE (XEXP (x, 0)) == PLUS
1883 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
1884 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
1885 return x;
1886 break;
1888 case INSN:
1889 case JUMP_INSN:
1890 case CALL_INSN:
1891 case NOTE:
1892 case BARRIER:
1893 /* The chain of insns is not being copied. */
1894 return x;
1896 case MEM:
1897 /* A MEM is allowed to be shared if its address is constant.
1899 We used to allow sharing of MEMs which referenced
1900 virtual_stack_vars_rtx or virtual_incoming_args_rtx, but
1901 that can lose. instantiate_virtual_regs will not unshare
1902 the MEMs, and combine may change the structure of the address
1903 because it looks safe and profitable in one context, but
1904 in some other context it creates unrecognizable RTL. */
1905 if (CONSTANT_ADDRESS_P (XEXP (x, 0)))
1906 return x;
1908 break;
1910 default:
1911 break;
1914 /* This rtx may not be shared. If it has already been seen,
1915 replace it with a copy of itself. */
1917 if (x->used)
1919 register rtx copy;
1921 copy = rtx_alloc (code);
1922 memcpy (copy, x,
1923 (sizeof (*copy) - sizeof (copy->fld)
1924 + sizeof (copy->fld[0]) * GET_RTX_LENGTH (code)));
1925 x = copy;
1926 copied = 1;
1928 x->used = 1;
1930 /* Now scan the subexpressions recursively.
1931 We can store any replaced subexpressions directly into X
1932 since we know X is not shared! Any vectors in X
1933 must be copied if X was copied. */
1935 format_ptr = GET_RTX_FORMAT (code);
1937 for (i = 0; i < GET_RTX_LENGTH (code); i++)
1939 switch (*format_ptr++)
1941 case 'e':
1942 XEXP (x, i) = copy_rtx_if_shared (XEXP (x, i));
1943 break;
1945 case 'E':
1946 if (XVEC (x, i) != NULL)
1948 register int j;
1949 int len = XVECLEN (x, i);
1951 if (copied && len > 0)
1952 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
1953 for (j = 0; j < len; j++)
1954 XVECEXP (x, i, j) = copy_rtx_if_shared (XVECEXP (x, i, j));
1956 break;
1959 return x;
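/* Illustrative sketch, not in the original file: the usual pairing is
   reset_used_flags followed by copy_rtx_if_shared, as the unshare_all_rtl
   routines above arrange, e.g.

     reset_used_flags (PATTERN (insn));
     PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));

   so the mark bits are known to be clear before the copying pass begins.  */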
1962 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
1963 to look for shared sub-parts. */
1965 void
1966 reset_used_flags (x)
1967 rtx x;
1969 register int i, j;
1970 register enum rtx_code code;
1971 register const char *format_ptr;
1973 if (x == 0)
1974 return;
1976 code = GET_CODE (x);
1978 /* These types may be freely shared so we needn't do any resetting
1979 for them. */
1981 switch (code)
1983 case REG:
1984 case QUEUED:
1985 case CONST_INT:
1986 case CONST_DOUBLE:
1987 case SYMBOL_REF:
1988 case CODE_LABEL:
1989 case PC:
1990 case CC0:
1991 return;
1993 case INSN:
1994 case JUMP_INSN:
1995 case CALL_INSN:
1996 case NOTE:
1997 case LABEL_REF:
1998 case BARRIER:
1999 /* The chain of insns is not being copied. */
2000 return;
2002 default:
2003 break;
2006 x->used = 0;
2008 format_ptr = GET_RTX_FORMAT (code);
2009 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2011 switch (*format_ptr++)
2013 case 'e':
2014 reset_used_flags (XEXP (x, i));
2015 break;
2017 case 'E':
2018 for (j = 0; j < XVECLEN (x, i); j++)
2019 reset_used_flags (XVECEXP (x, i, j));
2020 break;
2025 /* Copy X if necessary so that it won't be altered by changes in OTHER.
2026 Return X or the rtx for the pseudo reg the value of X was copied into.
2027 OTHER must be valid as a SET_DEST. */
2030 make_safe_from (x, other)
2031 rtx x, other;
2033 while (1)
2034 switch (GET_CODE (other))
2036 case SUBREG:
2037 other = SUBREG_REG (other);
2038 break;
2039 case STRICT_LOW_PART:
2040 case SIGN_EXTEND:
2041 case ZERO_EXTEND:
2042 other = XEXP (other, 0);
2043 break;
2044 default:
2045 goto done;
2047 done:
2048 if ((GET_CODE (other) == MEM
2049 && ! CONSTANT_P (x)
2050 && GET_CODE (x) != REG
2051 && GET_CODE (x) != SUBREG)
2052 || (GET_CODE (other) == REG
2053 && (REGNO (other) < FIRST_PSEUDO_REGISTER
2054 || reg_mentioned_p (other, x))))
2056 rtx temp = gen_reg_rtx (GET_MODE (x));
2057 emit_move_insn (temp, x);
2058 return temp;
2060 return x;
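/* Illustrative sketch, not in the original file: a caller that is about to
   store into TARGET while X must keep its current value might write

     x = make_safe_from (x, target);
     emit_move_insn (target, value);

   where `target' and `value' are caller-supplied rtx's; X is copied into a
   fresh pseudo only when the store could clobber it.  */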
2063 /* Emission of insns (adding them to the doubly-linked list). */
2065 /* Return the first insn of the current sequence or current function. */
2068 get_insns ()
2070 return first_insn;
2073 /* Return the last insn emitted in current sequence or current function. */
2076 get_last_insn ()
2078 return last_insn;
2081 /* Specify a new insn as the last in the chain. */
2083 void
2084 set_last_insn (insn)
2085 rtx insn;
2087 if (NEXT_INSN (insn) != 0)
2088 abort ();
2089 last_insn = insn;
2092 /* Return the last insn emitted, even if it is in a sequence now pushed. */
2095 get_last_insn_anywhere ()
2097 struct sequence_stack *stack;
2098 if (last_insn)
2099 return last_insn;
2100 for (stack = seq_stack; stack; stack = stack->next)
2101 if (stack->last != 0)
2102 return stack->last;
2103 return 0;
2106 /* Return a number larger than any instruction's uid in this function. */
2109 get_max_uid ()
2111 return cur_insn_uid;
2114 /* Renumber instructions so that no instruction UIDs are wasted. */
2116 void
2117 renumber_insns (stream)
2118 FILE *stream;
2120 rtx insn;
2122 /* If we're not supposed to renumber instructions, don't. */
2123 if (!flag_renumber_insns)
2124 return;
2126 /* If there aren't that many instructions, then it's not really
2127 worth renumbering them. */
2128 if (flag_renumber_insns == 1 && get_max_uid () < 25000)
2129 return;
2131 cur_insn_uid = 1;
2133 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2135 if (stream)
2136 fprintf (stream, "Renumbering insn %d to %d\n",
2137 INSN_UID (insn), cur_insn_uid);
2138 INSN_UID (insn) = cur_insn_uid++;
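/* Illustrative sketch, not in the original file: the caller passes whatever
   FILE * it uses for its RTL dump, or NULL for no logging, e.g.

     renumber_insns (rtl_dump_file);

   where `rtl_dump_file' stands for the caller's dump stream.  */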
2142 /* Return the next insn. If it is a SEQUENCE, return the first insn
2143 of the sequence. */
2146 next_insn (insn)
2147 rtx insn;
2149 if (insn)
2151 insn = NEXT_INSN (insn);
2152 if (insn && GET_CODE (insn) == INSN
2153 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2154 insn = XVECEXP (PATTERN (insn), 0, 0);
2157 return insn;
2160 /* Return the previous insn. If it is a SEQUENCE, return the last insn
2161 of the sequence. */
2164 previous_insn (insn)
2165 rtx insn;
2167 if (insn)
2169 insn = PREV_INSN (insn);
2170 if (insn && GET_CODE (insn) == INSN
2171 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2172 insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
2175 return insn;
2178 /* Return the next insn after INSN that is not a NOTE. This routine does not
2179 look inside SEQUENCEs. */
2182 next_nonnote_insn (insn)
2183 rtx insn;
2185 while (insn)
2187 insn = NEXT_INSN (insn);
2188 if (insn == 0 || GET_CODE (insn) != NOTE)
2189 break;
2192 return insn;
2195 /* Return the previous insn before INSN that is not a NOTE. This routine does
2196 not look inside SEQUENCEs. */
2199 prev_nonnote_insn (insn)
2200 rtx insn;
2202 while (insn)
2204 insn = PREV_INSN (insn);
2205 if (insn == 0 || GET_CODE (insn) != NOTE)
2206 break;
2209 return insn;
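/* Illustrative sketch, not in the original file: these walkers are commonly
   used to scan a whole function while skipping notes, e.g.

     rtx insn;
     int count = 0;

     for (insn = get_insns (); insn; insn = next_nonnote_insn (insn))
       if (INSN_P (insn))
         count++;

   using get_insns and INSN_P just as the routines in this file do.  */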
2212 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
2213 or 0, if there is none. This routine does not look inside
2214 SEQUENCEs. */
2217 next_real_insn (insn)
2218 rtx insn;
2220 while (insn)
2222 insn = NEXT_INSN (insn);
2223 if (insn == 0 || GET_CODE (insn) == INSN
2224 || GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == JUMP_INSN)
2225 break;
2228 return insn;
2231 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
2232 or 0, if there is none. This routine does not look inside
2233 SEQUENCEs. */
2236 prev_real_insn (insn)
2237 rtx insn;
2239 while (insn)
2241 insn = PREV_INSN (insn);
2242 if (insn == 0 || GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
2243 || GET_CODE (insn) == JUMP_INSN)
2244 break;
2247 return insn;
2250 /* Find the next insn after INSN that really does something. This routine
2251 does not look inside SEQUENCEs. Until reload has completed, this is the
2252 same as next_real_insn. */
2255 active_insn_p (insn)
2256 rtx insn;
2258 return (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == JUMP_INSN
2259 || (GET_CODE (insn) == INSN
2260 && (! reload_completed
2261 || (GET_CODE (PATTERN (insn)) != USE
2262 && GET_CODE (PATTERN (insn)) != CLOBBER))));
2266 next_active_insn (insn)
2267 rtx insn;
2269 while (insn)
2271 insn = NEXT_INSN (insn);
2272 if (insn == 0 || active_insn_p (insn))
2273 break;
2276 return insn;
2279 /* Find the last insn before INSN that really does something. This routine
2280 does not look inside SEQUENCEs. Until reload has completed, this is the
2281 same as prev_real_insn. */
2284 prev_active_insn (insn)
2285 rtx insn;
2287 while (insn)
2289 insn = PREV_INSN (insn);
2290 if (insn == 0 || active_insn_p (insn))
2291 break;
2294 return insn;
2297 /* Return the next CODE_LABEL after the insn INSN, or 0 if there is none. */
2300 next_label (insn)
2301 rtx insn;
2303 while (insn)
2305 insn = NEXT_INSN (insn);
2306 if (insn == 0 || GET_CODE (insn) == CODE_LABEL)
2307 break;
2310 return insn;
2313 /* Return the last CODE_LABEL before the insn INSN, or 0 if there is none. */
2316 prev_label (insn)
2317 rtx insn;
2319 while (insn)
2321 insn = PREV_INSN (insn);
2322 if (insn == 0 || GET_CODE (insn) == CODE_LABEL)
2323 break;
2326 return insn;
2329 #ifdef HAVE_cc0
2330 /* INSN uses CC0 and is being moved into a delay slot. Set up REG_CC_SETTER
2331 and REG_CC_USER notes so we can find it. */
2333 void
2334 link_cc0_insns (insn)
2335 rtx insn;
2337 rtx user = next_nonnote_insn (insn);
2339 if (GET_CODE (user) == INSN && GET_CODE (PATTERN (user)) == SEQUENCE)
2340 user = XVECEXP (PATTERN (user), 0, 0);
2342 REG_NOTES (user) = gen_rtx_INSN_LIST (REG_CC_SETTER, insn,
2343 REG_NOTES (user));
2344 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_CC_USER, user, REG_NOTES (insn));
2347 /* Return the next insn that uses CC0 after INSN, which is assumed to
2348 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
2349 applied to the result of this function should yield INSN).
2351 Normally, this is simply the next insn. However, if a REG_CC_USER note
2352 is present, it contains the insn that uses CC0.
2354 Return 0 if we can't find the insn. */
2357 next_cc0_user (insn)
2358 rtx insn;
2360 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
2362 if (note)
2363 return XEXP (note, 0);
2365 insn = next_nonnote_insn (insn);
2366 if (insn && GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
2367 insn = XVECEXP (PATTERN (insn), 0, 0);
2369 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
2370 return insn;
2372 return 0;
2375 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
2376 note, it is the previous insn. */
2379 prev_cc0_setter (insn)
2380 rtx insn;
2382 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
2384 if (note)
2385 return XEXP (note, 0);
2387 insn = prev_nonnote_insn (insn);
2388 if (! sets_cc0_p (PATTERN (insn)))
2389 abort ();
2391 return insn;
2393 #endif
2395 /* Try splitting insns that can be split for better scheduling.
2396 PAT is the pattern which might split.
2397 TRIAL is the insn providing PAT.
2398 LAST is non-zero if we should return the last insn of the sequence produced.
2400 If this routine succeeds in splitting, it returns the first or last
2401 replacement insn depending on the value of LAST. Otherwise, it
2402 returns TRIAL. If the insn to be returned can be split, it will be. */
2405 try_split (pat, trial, last)
2406 rtx pat, trial;
2407 int last;
2409 rtx before = PREV_INSN (trial);
2410 rtx after = NEXT_INSN (trial);
2411 rtx seq = split_insns (pat, trial);
2412 int has_barrier = 0;
2413 rtx tem;
2415 /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
2416 We may need to handle this specially. */
2417 if (after && GET_CODE (after) == BARRIER)
2419 has_barrier = 1;
2420 after = NEXT_INSN (after);
2423 if (seq)
2425 /* SEQ can either be a SEQUENCE or the pattern of a single insn.
2426 The latter case normally arises only when the split is being done so that
2427 the result will, in turn, be split again (SFmode on the 29k is an example). */
2428 if (GET_CODE (seq) == SEQUENCE)
2430 int i;
2432 /* Avoid infinite loop if any insn of the result matches
2433 the original pattern. */
2434 for (i = 0; i < XVECLEN (seq, 0); i++)
2435 if (GET_CODE (XVECEXP (seq, 0, i)) == INSN
2436 && rtx_equal_p (PATTERN (XVECEXP (seq, 0, i)), pat))
2437 return trial;
2439 /* Mark labels. */
2440 for (i = XVECLEN (seq, 0) - 1; i >= 0; i--)
2441 if (GET_CODE (XVECEXP (seq, 0, i)) == JUMP_INSN)
2442 mark_jump_label (PATTERN (XVECEXP (seq, 0, i)),
2443 XVECEXP (seq, 0, i), 0, 0);
2445 /* If we are splitting a CALL_INSN, look for the CALL_INSN
2446 in SEQ and copy our CALL_INSN_FUNCTION_USAGE to it. */
2447 if (GET_CODE (trial) == CALL_INSN)
2448 for (i = XVECLEN (seq, 0) - 1; i >= 0; i--)
2449 if (GET_CODE (XVECEXP (seq, 0, i)) == CALL_INSN)
2450 CALL_INSN_FUNCTION_USAGE (XVECEXP (seq, 0, i))
2451 = CALL_INSN_FUNCTION_USAGE (trial);
2453 tem = emit_insn_after (seq, before);
2455 delete_insn (trial);
2456 if (has_barrier)
2457 emit_barrier_after (tem);
2459 /* Recursively call try_split for each new insn created; by the
2460 time control returns here that insn will be fully split, so
2461 set LAST and continue from the insn after the one returned.
2462 We can't use next_active_insn here since AFTER may be a note.
2463 Ignore deleted insns, which can occur if not optimizing. */
2464 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
2465 if (! INSN_DELETED_P (tem) && INSN_P (tem))
2466 tem = try_split (PATTERN (tem), tem, 1);
2468 /* Avoid infinite loop if the result matches the original pattern. */
2469 else if (rtx_equal_p (seq, pat))
2470 return trial;
2471 else
2473 PATTERN (trial) = seq;
2474 INSN_CODE (trial) = -1;
2475 try_split (seq, trial, last);
2478 /* Return either the first or the last insn, depending on which was
2479 requested. */
2480 return last
2481 ? (after ? prev_active_insn (after) : last_insn)
2482 : next_active_insn (before);
2485 return trial;
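/* Illustrative sketch, not in the original file: callers normally hand
   try_split an insn's own pattern and let it replace the insn in place,

     insn = try_split (PATTERN (insn), insn, 1);

   exactly as the recursive call inside try_split itself does.  */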
2488 /* Make and return an INSN rtx, initializing all its slots.
2489 Store PATTERN in the pattern slots. */
2492 make_insn_raw (pattern)
2493 rtx pattern;
2495 register rtx insn;
2497 insn = rtx_alloc (INSN);
2499 INSN_UID (insn) = cur_insn_uid++;
2500 PATTERN (insn) = pattern;
2501 INSN_CODE (insn) = -1;
2502 LOG_LINKS (insn) = NULL;
2503 REG_NOTES (insn) = NULL;
2505 #ifdef ENABLE_RTL_CHECKING
2506 if (insn
2507 && INSN_P (insn)
2508 && (returnjump_p (insn)
2509 || (GET_CODE (insn) == SET
2510 && SET_DEST (insn) == pc_rtx)))
2512 warning ("ICE: emit_insn used where emit_jump_insn needed:\n");
2513 debug_rtx (insn);
2515 #endif
2517 return insn;
2520 /* Like `make_insn' but make a JUMP_INSN instead of an insn. */
2522 static rtx
2523 make_jump_insn_raw (pattern)
2524 rtx pattern;
2526 register rtx insn;
2528 insn = rtx_alloc (JUMP_INSN);
2529 INSN_UID (insn) = cur_insn_uid++;
2531 PATTERN (insn) = pattern;
2532 INSN_CODE (insn) = -1;
2533 LOG_LINKS (insn) = NULL;
2534 REG_NOTES (insn) = NULL;
2535 JUMP_LABEL (insn) = NULL;
2537 return insn;
2540 /* Like `make_insn' but make a CALL_INSN instead of an insn. */
2542 static rtx
2543 make_call_insn_raw (pattern)
2544 rtx pattern;
2546 register rtx insn;
2548 insn = rtx_alloc (CALL_INSN);
2549 INSN_UID (insn) = cur_insn_uid++;
2551 PATTERN (insn) = pattern;
2552 INSN_CODE (insn) = -1;
2553 LOG_LINKS (insn) = NULL;
2554 REG_NOTES (insn) = NULL;
2555 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
2557 return insn;
2560 /* Add INSN to the end of the doubly-linked list.
2561 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
2563 void
2564 add_insn (insn)
2565 register rtx insn;
2567 PREV_INSN (insn) = last_insn;
2568 NEXT_INSN (insn) = 0;
2570 if (NULL != last_insn)
2571 NEXT_INSN (last_insn) = insn;
2573 if (NULL == first_insn)
2574 first_insn = insn;
2576 last_insn = insn;
2579 /* Add INSN into the doubly-linked list after insn AFTER. This and
2580 the next should be the only functions called to insert an insn once
2581 delay slots have been filled since only they know how to update a
2582 SEQUENCE. */
2584 void
2585 add_insn_after (insn, after)
2586 rtx insn, after;
2588 rtx next = NEXT_INSN (after);
2590 if (optimize && INSN_DELETED_P (after))
2591 abort ();
2593 NEXT_INSN (insn) = next;
2594 PREV_INSN (insn) = after;
2596 if (next)
2598 PREV_INSN (next) = insn;
2599 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
2600 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
2602 else if (last_insn == after)
2603 last_insn = insn;
2604 else
2606 struct sequence_stack *stack = seq_stack;
2607 /* Scan all pending sequences too. */
2608 for (; stack; stack = stack->next)
2609 if (after == stack->last)
2611 stack->last = insn;
2612 break;
2615 if (stack == 0)
2616 abort ();
2619 NEXT_INSN (after) = insn;
2620 if (GET_CODE (after) == INSN && GET_CODE (PATTERN (after)) == SEQUENCE)
2622 rtx sequence = PATTERN (after);
2623 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
2627 /* Add INSN into the doubly-linked list before insn BEFORE. This and
2628 the previous should be the only functions called to insert an insn once
2629 delay slots have been filled since only they know how to update a
2630 SEQUENCE. */
2632 void
2633 add_insn_before (insn, before)
2634 rtx insn, before;
2636 rtx prev = PREV_INSN (before);
2638 if (optimize && INSN_DELETED_P (before))
2639 abort ();
2641 PREV_INSN (insn) = prev;
2642 NEXT_INSN (insn) = before;
2644 if (prev)
2646 NEXT_INSN (prev) = insn;
2647 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
2649 rtx sequence = PATTERN (prev);
2650 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
2653 else if (first_insn == before)
2654 first_insn = insn;
2655 else
2657 struct sequence_stack *stack = seq_stack;
2658 /* Scan all pending sequences too. */
2659 for (; stack; stack = stack->next)
2660 if (before == stack->first)
2662 stack->first = insn;
2663 break;
2666 if (stack == 0)
2667 abort ();
2670 PREV_INSN (before) = insn;
2671 if (GET_CODE (before) == INSN && GET_CODE (PATTERN (before)) == SEQUENCE)
2672 PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
2675 /* Remove an insn from its doubly-linked list. This function knows how
2676 to handle sequences. */
2677 void
2678 remove_insn (insn)
2679 rtx insn;
2681 rtx next = NEXT_INSN (insn);
2682 rtx prev = PREV_INSN (insn);
2683 if (prev)
2685 NEXT_INSN (prev) = next;
2686 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
2688 rtx sequence = PATTERN (prev);
2689 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
2692 else if (first_insn == insn)
2693 first_insn = next;
2694 else
2696 struct sequence_stack *stack = seq_stack;
2697 /* Scan all pending sequences too. */
2698 for (; stack; stack = stack->next)
2699 if (insn == stack->first)
2701 stack->first = next;
2702 break;
2705 if (stack == 0)
2706 abort ();
2709 if (next)
2711 PREV_INSN (next) = prev;
2712 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
2713 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
2715 else if (last_insn == insn)
2716 last_insn = prev;
2717 else
2719 struct sequence_stack *stack = seq_stack;
2720 /* Scan all pending sequences too. */
2721 for (; stack; stack = stack->next)
2722 if (insn == stack->last)
2724 stack->last = prev;
2725 break;
2728 if (stack == 0)
2729 abort ();
2733 /* Delete all insns made since FROM.
2734 FROM becomes the new last instruction. */
2736 void
2737 delete_insns_since (from)
2738 rtx from;
2740 if (from == 0)
2741 first_insn = 0;
2742 else
2743 NEXT_INSN (from) = 0;
2744 last_insn = from;
2747 /* This function is deprecated, please use sequences instead.
2749 Move a consecutive bunch of insns to a different place in the chain.
2750 The insns to be moved are those between FROM and TO.
2751 They are moved to a new position after the insn AFTER.
2752 AFTER must not be FROM or TO or any insn in between.
2754 This function does not know about SEQUENCEs and hence should not be
2755 called after delay-slot filling has been done. */
2757 void
2758 reorder_insns (from, to, after)
2759 rtx from, to, after;
2761 /* Splice this bunch out of where it is now. */
2762 if (PREV_INSN (from))
2763 NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
2764 if (NEXT_INSN (to))
2765 PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
2766 if (last_insn == to)
2767 last_insn = PREV_INSN (from);
2768 if (first_insn == from)
2769 first_insn = NEXT_INSN (to);
2771 /* Make the new neighbors point to it and it to them. */
2772 if (NEXT_INSN (after))
2773 PREV_INSN (NEXT_INSN (after)) = to;
2775 NEXT_INSN (to) = NEXT_INSN (after);
2776 PREV_INSN (from) = after;
2777 NEXT_INSN (after) = from;
2778 if (after == last_insn)
2779 last_insn = to;
2782 /* Return the line note insn preceding INSN. */
2784 static rtx
2785 find_line_note (insn)
2786 rtx insn;
2788 if (no_line_numbers)
2789 return 0;
2791 for (; insn; insn = PREV_INSN (insn))
2792 if (GET_CODE (insn) == NOTE
2793 && NOTE_LINE_NUMBER (insn) >= 0)
2794 break;
2796 return insn;
2799 /* Like reorder_insns, but inserts line notes to preserve the line numbers
2800 of the moved insns when debugging. This may insert a note between AFTER
2801 and FROM, and another one after TO. */
2803 void
2804 reorder_insns_with_line_notes (from, to, after)
2805 rtx from, to, after;
2807 rtx from_line = find_line_note (from);
2808 rtx after_line = find_line_note (after);
2810 reorder_insns (from, to, after);
2812 if (from_line == after_line)
2813 return;
2815 if (from_line)
2816 emit_line_note_after (NOTE_SOURCE_FILE (from_line),
2817 NOTE_LINE_NUMBER (from_line),
2818 after);
2819 if (after_line)
2820 emit_line_note_after (NOTE_SOURCE_FILE (after_line),
2821 NOTE_LINE_NUMBER (after_line),
2822 to);
2825 /* Remove unnecessary notes from the instruction stream. */
2827 void
2828 remove_unnecessary_notes ()
2830 rtx insn;
2831 rtx next;
2833 /* We must not remove the first instruction in the function because
2834 the compiler depends on the first instruction being a note. */
2835 for (insn = NEXT_INSN (get_insns ()); insn; insn = next)
2837 /* Remember what's next. */
2838 next = NEXT_INSN (insn);
2840 /* We're only interested in notes. */
2841 if (GET_CODE (insn) != NOTE)
2842 continue;
2844 /* By now, all notes indicating lexical blocks should have
2845 NOTE_BLOCK filled in. */
2846 if ((NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2847 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
2848 && NOTE_BLOCK (insn) == NULL_TREE)
2849 abort ();
2851 /* Remove NOTE_INSN_DELETED notes. */
2852 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED)
2853 remove_insn (insn);
2854 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
2856 /* Scan back to see if there are any non-note instructions
2857 between INSN and the beginning of this block. If not,
2858 then there is no PC range in the generated code that will
2859 actually be in this block, so there's no point in
2860 remembering the existence of the block. */
2861 rtx prev;
2863 for (prev = PREV_INSN (insn); prev; prev = PREV_INSN (prev))
2865 /* This block contains a real instruction. Note that we
2866 don't include labels; if the only thing in the block
2867 is a label, then there are still no PC values that
2868 lie within the block. */
2869 if (INSN_P (prev))
2870 break;
2872 /* We're only interested in NOTEs. */
2873 if (GET_CODE (prev) != NOTE)
2874 continue;
2876 if (NOTE_LINE_NUMBER (prev) == NOTE_INSN_BLOCK_BEG)
2878 /* If the BLOCKs referred to by these notes don't
2879 match, then something is wrong with our BLOCK
2880 nesting structure. */
2881 if (NOTE_BLOCK (prev) != NOTE_BLOCK (insn))
2882 abort ();
2884 if (debug_ignore_block (NOTE_BLOCK (insn)))
2886 remove_insn (prev);
2887 remove_insn (insn);
2889 break;
2891 else if (NOTE_LINE_NUMBER (prev) == NOTE_INSN_BLOCK_END)
2892 /* There's a nested block. We need to leave the
2893 current block in place since otherwise the debugger
2894 wouldn't be able to show symbols from our block in
2895 the nested block. */
2896 break;
2903 /* Emit an insn of given code and pattern
2904 at a specified place within the doubly-linked list. */
2906 /* Make an instruction with body PATTERN
2907 and output it before the instruction BEFORE. */
2910 emit_insn_before (pattern, before)
2911 register rtx pattern, before;
2913 register rtx insn = before;
2915 if (GET_CODE (pattern) == SEQUENCE)
2917 register int i;
2919 for (i = 0; i < XVECLEN (pattern, 0); i++)
2921 insn = XVECEXP (pattern, 0, i);
2922 add_insn_before (insn, before);
2925 else
2927 insn = make_insn_raw (pattern);
2928 add_insn_before (insn, before);
2931 return insn;
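/* Illustrative sketch, not in the original file: a typical caller builds a
   pattern and inserts it ahead of an existing insn, e.g.

     emit_insn_before (gen_move_insn (dest, src), insn);

   where `dest', `src' and `insn' are caller-supplied rtx's and
   gen_move_insn comes from the expander support in expr.c.  */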
2934 /* Similar to emit_insn_before, but update basic block boundaries as well. */
2937 emit_block_insn_before (pattern, before, block)
2938 rtx pattern, before;
2939 basic_block block;
2941 rtx prev = PREV_INSN (before);
2942 rtx r = emit_insn_before (pattern, before);
2943 if (block && block->head == before)
2944 block->head = NEXT_INSN (prev);
2945 return r;
2948 /* Make an instruction with body PATTERN and code JUMP_INSN
2949 and output it before the instruction BEFORE. */
2952 emit_jump_insn_before (pattern, before)
2953 register rtx pattern, before;
2955 register rtx insn;
2957 if (GET_CODE (pattern) == SEQUENCE)
2958 insn = emit_insn_before (pattern, before);
2959 else
2961 insn = make_jump_insn_raw (pattern);
2962 add_insn_before (insn, before);
2965 return insn;
2968 /* Make an instruction with body PATTERN and code CALL_INSN
2969 and output it before the instruction BEFORE. */
2972 emit_call_insn_before (pattern, before)
2973 register rtx pattern, before;
2975 register rtx insn;
2977 if (GET_CODE (pattern) == SEQUENCE)
2978 insn = emit_insn_before (pattern, before);
2979 else
2981 insn = make_call_insn_raw (pattern);
2982 add_insn_before (insn, before);
2983 PUT_CODE (insn, CALL_INSN);
2986 return insn;
2989 /* Make an insn of code BARRIER
2990 and output it before the insn BEFORE. */
2993 emit_barrier_before (before)
2994 register rtx before;
2996 register rtx insn = rtx_alloc (BARRIER);
2998 INSN_UID (insn) = cur_insn_uid++;
3000 add_insn_before (insn, before);
3001 return insn;
3004 /* Emit the label LABEL before the insn BEFORE. */
3007 emit_label_before (label, before)
3008 rtx label, before;
3010 /* This can be called twice for the same label as a result of the
3011 confusion that follows a syntax error! So make it harmless. */
3012 if (INSN_UID (label) == 0)
3014 INSN_UID (label) = cur_insn_uid++;
3015 add_insn_before (label, before);
3018 return label;
3021 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
3024 emit_note_before (subtype, before)
3025 int subtype;
3026 rtx before;
3028 register rtx note = rtx_alloc (NOTE);
3029 INSN_UID (note) = cur_insn_uid++;
3030 NOTE_SOURCE_FILE (note) = 0;
3031 NOTE_LINE_NUMBER (note) = subtype;
3033 add_insn_before (note, before);
3034 return note;
3037 /* Make an insn of code INSN with body PATTERN
3038 and output it after the insn AFTER. */
3041 emit_insn_after (pattern, after)
3042 register rtx pattern, after;
3044 register rtx insn = after;
3046 if (GET_CODE (pattern) == SEQUENCE)
3048 register int i;
3050 for (i = 0; i < XVECLEN (pattern, 0); i++)
3052 insn = XVECEXP (pattern, 0, i);
3053 add_insn_after (insn, after);
3054 after = insn;
3057 else
3059 insn = make_insn_raw (pattern);
3060 add_insn_after (insn, after);
3063 return insn;
3066 /* Similar to emit_insn_after, except that line notes are to be inserted so
3067 as to act as if this insn were at FROM. */
3069 void
3070 emit_insn_after_with_line_notes (pattern, after, from)
3071 rtx pattern, after, from;
3073 rtx from_line = find_line_note (from);
3074 rtx after_line = find_line_note (after);
3075 rtx insn = emit_insn_after (pattern, after);
3077 if (from_line)
3078 emit_line_note_after (NOTE_SOURCE_FILE (from_line),
3079 NOTE_LINE_NUMBER (from_line),
3080 after);
3082 if (after_line)
3083 emit_line_note_after (NOTE_SOURCE_FILE (after_line),
3084 NOTE_LINE_NUMBER (after_line),
3085 insn);
3088 /* Similar to emit_insn_after, but update basic block boundaries as well. */
3091 emit_block_insn_after (pattern, after, block)
3092 rtx pattern, after;
3093 basic_block block;
3095 rtx r = emit_insn_after (pattern, after);
3096 if (block && block->end == after)
3097 block->end = r;
3098 return r;
3101 /* Make an insn of code JUMP_INSN with body PATTERN
3102 and output it after the insn AFTER. */
3105 emit_jump_insn_after (pattern, after)
3106 register rtx pattern, after;
3108 register rtx insn;
3110 if (GET_CODE (pattern) == SEQUENCE)
3111 insn = emit_insn_after (pattern, after);
3112 else
3114 insn = make_jump_insn_raw (pattern);
3115 add_insn_after (insn, after);
3118 return insn;
3121 /* Make an insn of code BARRIER
3122 and output it after the insn AFTER. */
3125 emit_barrier_after (after)
3126 register rtx after;
3128 register rtx insn = rtx_alloc (BARRIER);
3130 INSN_UID (insn) = cur_insn_uid++;
3132 add_insn_after (insn, after);
3133 return insn;
3136 /* Emit the label LABEL after the insn AFTER. */
3139 emit_label_after (label, after)
3140 rtx label, after;
3142 /* This can be called twice for the same label
3143 as a result of the confusion that follows a syntax error!
3144 So make it harmless. */
3145 if (INSN_UID (label) == 0)
3147 INSN_UID (label) = cur_insn_uid++;
3148 add_insn_after (label, after);
3151 return label;
3154 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
3157 emit_note_after (subtype, after)
3158 int subtype;
3159 rtx after;
3161 register rtx note = rtx_alloc (NOTE);
3162 INSN_UID (note) = cur_insn_uid++;
3163 NOTE_SOURCE_FILE (note) = 0;
3164 NOTE_LINE_NUMBER (note) = subtype;
3165 add_insn_after (note, after);
3166 return note;
3169 /* Emit a line note for FILE and LINE after the insn AFTER. */
3172 emit_line_note_after (file, line, after)
3173 const char *file;
3174 int line;
3175 rtx after;
3177 register rtx note;
3179 if (no_line_numbers && line > 0)
3181 cur_insn_uid++;
3182 return 0;
3185 note = rtx_alloc (NOTE);
3186 INSN_UID (note) = cur_insn_uid++;
3187 NOTE_SOURCE_FILE (note) = file;
3188 NOTE_LINE_NUMBER (note) = line;
3189 add_insn_after (note, after);
3190 return note;
3193 /* Make an insn of code INSN with pattern PATTERN
3194 and add it to the end of the doubly-linked list.
3195 If PATTERN is a SEQUENCE, take the elements of it
3196 and emit an insn for each element.
3198 Returns the last insn emitted. */
3201 emit_insn (pattern)
3202 rtx pattern;
3204 rtx insn = last_insn;
3206 if (GET_CODE (pattern) == SEQUENCE)
3208 register int i;
3210 for (i = 0; i < XVECLEN (pattern, 0); i++)
3212 insn = XVECEXP (pattern, 0, i);
3213 add_insn (insn);
3216 else
3218 insn = make_insn_raw (pattern);
3219 add_insn (insn);
3222 return insn;
3225 /* Emit the insns in a chain starting with INSN.
3226 Return the last insn emitted. */
3229 emit_insns (insn)
3230 rtx insn;
3232 rtx last = 0;
3234 while (insn)
3236 rtx next = NEXT_INSN (insn);
3237 add_insn (insn);
3238 last = insn;
3239 insn = next;
3242 return last;
3245 /* Emit the insns in a chain starting with INSN and place them in front of
3246 the insn BEFORE. Return the last insn emitted. */
3249 emit_insns_before (insn, before)
3250 rtx insn;
3251 rtx before;
3253 rtx last = 0;
3255 while (insn)
3257 rtx next = NEXT_INSN (insn);
3258 add_insn_before (insn, before);
3259 last = insn;
3260 insn = next;
3263 return last;
3266 /* Emit the insns in a chain starting with FIRST and place them in back of
3267 the insn AFTER. Return the last insn emitted. */
3270 emit_insns_after (first, after)
3271 register rtx first;
3272 register rtx after;
3274 register rtx last;
3275 register rtx after_after;
3277 if (!after)
3278 abort ();
3280 if (!first)
3281 return first;
3283 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
3284 continue;
3286 after_after = NEXT_INSN (after);
3288 NEXT_INSN (after) = first;
3289 PREV_INSN (first) = after;
3290 NEXT_INSN (last) = after_after;
3291 if (after_after)
3292 PREV_INSN (after_after) = last;
3294 if (after == last_insn)
3295 last_insn = last;
3296 return last;
3299 /* Make an insn of code JUMP_INSN with pattern PATTERN
3300 and add it to the end of the doubly-linked list. */
3303 emit_jump_insn (pattern)
3304 rtx pattern;
3306 if (GET_CODE (pattern) == SEQUENCE)
3307 return emit_insn (pattern);
3308 else
3310 register rtx insn = make_jump_insn_raw (pattern);
3311 add_insn (insn);
3312 return insn;
3316 /* Make an insn of code CALL_INSN with pattern PATTERN
3317 and add it to the end of the doubly-linked list. */
3320 emit_call_insn (pattern)
3321 rtx pattern;
3323 if (GET_CODE (pattern) == SEQUENCE)
3324 return emit_insn (pattern);
3325 else
3327 register rtx insn = make_call_insn_raw (pattern);
3328 add_insn (insn);
3329 PUT_CODE (insn, CALL_INSN);
3330 return insn;
3334 /* Add the label LABEL to the end of the doubly-linked list. */
3337 emit_label (label)
3338 rtx label;
3340 /* This can be called twice for the same label
3341 as a result of the confusion that follows a syntax error!
3342 So make it harmless. */
3343 if (INSN_UID (label) == 0)
3345 INSN_UID (label) = cur_insn_uid++;
3346 add_insn (label);
3348 return label;
3351 /* Make an insn of code BARRIER
3352 and add it to the end of the doubly-linked list. */
3355 emit_barrier ()
3357 register rtx barrier = rtx_alloc (BARRIER);
3358 INSN_UID (barrier) = cur_insn_uid++;
3359 add_insn (barrier);
3360 return barrier;
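/* Illustrative sketch, not in the original file: an unconditional jump is
   normally followed by a barrier, with the target label emitted where
   control resumes, e.g.

     rtx label = gen_label_rtx ();

     emit_jump_insn (gen_jump (label));
     emit_barrier ();
     ... insns that are jumped over ...
     emit_label (label);

   gen_jump is assumed to be the jump pattern generator supplied by the
   machine description.  */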
3363 /* Make an insn of code NOTE
3364 with data-fields specified by FILE and LINE
3365 and add it to the end of the doubly-linked list,
3366 but only if line-numbers are desired for debugging info. */
3369 emit_line_note (file, line)
3370 const char *file;
3371 int line;
3373 set_file_and_line_for_stmt (file, line);
3375 #if 0
3376 if (no_line_numbers)
3377 return 0;
3378 #endif
3380 return emit_note (file, line);
3383 /* Make an insn of code NOTE
3384 with data-fields specified by FILE and LINE
3385 and add it to the end of the doubly-linked list.
3386 If it is a line-number NOTE, omit it if it matches the previous one. */
3389 emit_note (file, line)
3390 const char *file;
3391 int line;
3393 register rtx note;
3395 if (line > 0)
3397 if (file && last_filename && !strcmp (file, last_filename)
3398 && line == last_linenum)
3399 return 0;
3400 last_filename = file;
3401 last_linenum = line;
3404 if (no_line_numbers && line > 0)
3406 cur_insn_uid++;
3407 return 0;
3410 note = rtx_alloc (NOTE);
3411 INSN_UID (note) = cur_insn_uid++;
3412 NOTE_SOURCE_FILE (note) = file;
3413 NOTE_LINE_NUMBER (note) = line;
3414 add_insn (note);
3415 return note;
3418 /* Emit a NOTE, and don't omit it even if LINE is the previous note. */
3421 emit_line_note_force (file, line)
3422 const char *file;
3423 int line;
3425 last_linenum = -1;
3426 return emit_line_note (file, line);
3429 /* Cause next statement to emit a line note even if the line number
3430 has not changed. This is used at the beginning of a function. */
3432 void
3433 force_next_line_note ()
3435 last_linenum = -1;
3438 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
3439 note of this type already exists, remove it first. */
3441 void
3442 set_unique_reg_note (insn, kind, datum)
3443 rtx insn;
3444 enum reg_note kind;
3445 rtx datum;
3447 rtx note = find_reg_note (insn, kind, NULL_RTX);
3449 /* First remove the note if there already is one. */
3450 if (note)
3451 remove_note (insn, note);
3453 REG_NOTES (insn) = gen_rtx_EXPR_LIST (kind, datum, REG_NOTES (insn));
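/* Illustrative sketch, not in the original file: a typical use is recording
   the value an insn computes, e.g.

     set_unique_reg_note (insn, REG_EQUAL, copy_rtx (value));

   where `insn' and `value' are caller-supplied; any older REG_EQUAL note on
   the insn is removed first.  */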
3456 /* Return an indication of which type of insn should have X as a body.
3457 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
3459 enum rtx_code
3460 classify_insn (x)
3461 rtx x;
3463 if (GET_CODE (x) == CODE_LABEL)
3464 return CODE_LABEL;
3465 if (GET_CODE (x) == CALL)
3466 return CALL_INSN;
3467 if (GET_CODE (x) == RETURN)
3468 return JUMP_INSN;
3469 if (GET_CODE (x) == SET)
3471 if (SET_DEST (x) == pc_rtx)
3472 return JUMP_INSN;
3473 else if (GET_CODE (SET_SRC (x)) == CALL)
3474 return CALL_INSN;
3475 else
3476 return INSN;
3478 if (GET_CODE (x) == PARALLEL)
3480 register int j;
3481 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
3482 if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
3483 return CALL_INSN;
3484 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
3485 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
3486 return JUMP_INSN;
3487 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
3488 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
3489 return CALL_INSN;
3491 return INSN;
3494 /* Emit the rtl pattern X as an appropriate kind of insn.
3495 If X is a label, it is simply added into the insn chain. */
3498 emit (x)
3499 rtx x;
3501 enum rtx_code code = classify_insn (x);
3503 if (code == CODE_LABEL)
3504 return emit_label (x);
3505 else if (code == INSN)
3506 return emit_insn (x);
3507 else if (code == JUMP_INSN)
3509 register rtx insn = emit_jump_insn (x);
3510 if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
3511 return emit_barrier ();
3512 return insn;
3514 else if (code == CALL_INSN)
3515 return emit_call_insn (x);
3516 else
3517 abort ();
3520 /* Begin emitting insns to a sequence which can be packaged in an
3521 RTL_EXPR. If this sequence will contain something that might cause
3522 the compiler to pop arguments to function calls (because those
3523 pops have previously been deferred; see INHIBIT_DEFER_POP for more
3524 details), use do_pending_stack_adjust before calling this function.
3525 That will ensure that the deferred pops are not accidentally
3526 emitted in the middle of this sequence. */
3528 void
3529 start_sequence ()
3531 struct sequence_stack *tem;
3533 tem = (struct sequence_stack *) xmalloc (sizeof (struct sequence_stack));
3535 tem->next = seq_stack;
3536 tem->first = first_insn;
3537 tem->last = last_insn;
3538 tem->sequence_rtl_expr = seq_rtl_expr;
3540 seq_stack = tem;
3542 first_insn = 0;
3543 last_insn = 0;
3546 /* Similarly, but indicate that this sequence will be placed in T, an
3547 RTL_EXPR. See the documentation for start_sequence for more
3548 information about how to use this function. */
3550 void
3551 start_sequence_for_rtl_expr (t)
3552 tree t;
3554 start_sequence ();
3556 seq_rtl_expr = t;
3559 /* Set up the insn chain starting with FIRST as the current sequence,
3560 saving the previously current one. See the documentation for
3561 start_sequence for more information about how to use this function. */
3563 void
3564 push_to_sequence (first)
3565 rtx first;
3567 rtx last;
3569 start_sequence ();
3571 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last));
3573 first_insn = first;
3574 last_insn = last;
3577 /* Set up the insn chain, from FIRST to LAST, as the current sequence. */
3579 void
3580 push_to_full_sequence (first, last)
3581 rtx first, last;
3583 start_sequence ();
3584 first_insn = first;
3585 last_insn = last;
3586 /* We really should have the end of the insn chain here. */
3587 if (last && NEXT_INSN (last))
3588 abort ();
3591 /* Set up the outer-level insn chain
3592 as the current sequence, saving the previously current one. */
3594 void
3595 push_topmost_sequence ()
3597 struct sequence_stack *stack, *top = NULL;
3599 start_sequence ();
3601 for (stack = seq_stack; stack; stack = stack->next)
3602 top = stack;
3604 first_insn = top->first;
3605 last_insn = top->last;
3606 seq_rtl_expr = top->sequence_rtl_expr;
3609 /* After emitting to the outer-level insn chain, update the outer-level
3610 insn chain, and restore the previous saved state. */
3612 void
3613 pop_topmost_sequence ()
3615 struct sequence_stack *stack, *top = NULL;
3617 for (stack = seq_stack; stack; stack = stack->next)
3618 top = stack;
3620 top->first = first_insn;
3621 top->last = last_insn;
3622 /* ??? Why don't we save seq_rtl_expr here? */
3624 end_sequence ();
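/* Illustrative sketch, not in the original file: the topmost-sequence pair
   lets a caller emit into the function's outer chain even while a nested
   sequence is in progress, e.g.

     push_topmost_sequence ();
     emit_insn_after (pattern, get_insns ());
     pop_topmost_sequence ();

   which places the caller-supplied PATTERN right after the function's
   first insn.  */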
3627 /* After emitting to a sequence, restore previous saved state.
3629 To get the contents of the sequence just made, you must call
3630 `gen_sequence' *before* calling here.
3632 If the compiler might have deferred popping arguments while
3633 generating this sequence, and this sequence will not be immediately
3634 inserted into the instruction stream, use do_pending_stack_adjust
3635 before calling gen_sequence. That will ensure that the deferred
3636 pops are inserted into this sequence, and not into some random
3637 location in the instruction stream. See INHIBIT_DEFER_POP for more
3638 information about deferred popping of arguments. */
3640 void
3641 end_sequence ()
3643 struct sequence_stack *tem = seq_stack;
3645 first_insn = tem->first;
3646 last_insn = tem->last;
3647 seq_rtl_expr = tem->sequence_rtl_expr;
3648 seq_stack = tem->next;
3650 free (tem);
3653 /* This works like end_sequence, but records the old sequence in FIRST
3654 and LAST. */
3656 void
3657 end_full_sequence (first, last)
3658 rtx *first, *last;
3660 *first = first_insn;
3661 *last = last_insn;
3662 end_sequence();
3665 /* Return 1 if currently emitting into a sequence. */
3668 in_sequence_p ()
3670 return seq_stack != 0;
3673 /* Generate a SEQUENCE rtx containing the insns already emitted
3674 to the current sequence.
3676 This is how the gen_... function from a DEFINE_EXPAND
3677 constructs the SEQUENCE that it returns. */
3680 gen_sequence ()
3682 rtx result;
3683 rtx tem;
3684 int i;
3685 int len;
3687 /* Count the insns in the chain. */
3688 len = 0;
3689 for (tem = first_insn; tem; tem = NEXT_INSN (tem))
3690 len++;
3692 /* If only one insn, return it rather than a SEQUENCE.
3693 (Now that we cache SEQUENCE expressions, it isn't worth special-casing
3694 the case of an empty list.)
3695 We only return the pattern of an insn if its code is INSN and it
3696 has no notes. This ensures that no information gets lost. */
3697 if (len == 1
3698 && ! RTX_FRAME_RELATED_P (first_insn)
3699 && GET_CODE (first_insn) == INSN
3700 /* Don't throw away any reg notes. */
3701 && REG_NOTES (first_insn) == 0)
3702 return PATTERN (first_insn);
3704 result = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (len));
3706 for (i = 0, tem = first_insn; tem; tem = NEXT_INSN (tem), i++)
3707 XVECEXP (result, 0, i) = tem;
3709 return result;
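/* Illustrative sketch, not in the original file: the usual way to build
   insns on the side and insert them in one piece is

     rtx seq;

     start_sequence ();
     emit_move_insn (temp, src);
     ... more emits ...
     seq = gen_sequence ();
     end_sequence ();
     emit_insn_before (seq, insn);

   where `temp', `src' and `insn' are caller-supplied rtx's; note that
   gen_sequence must be called before end_sequence, as documented above.  */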
3712 /* Put the various virtual registers into REGNO_REG_RTX. */
3714 void
3715 init_virtual_regs (es)
3716 struct emit_status *es;
3718 rtx *ptr = es->x_regno_reg_rtx;
3719 ptr[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
3720 ptr[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
3721 ptr[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
3722 ptr[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
3723 ptr[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
3726 void
3727 clear_emit_caches ()
3729 int i;
3731 /* Clear the start_sequence/gen_sequence cache. */
3732 for (i = 0; i < SEQUENCE_RESULT_SIZE; i++)
3733 sequence_result[i] = 0;
3734 free_insn = 0;
3737 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
3738 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
3739 static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
3740 static int copy_insn_n_scratches;
3742 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
3743 copied an ASM_OPERANDS.
3744 In that case, it is the original input-operand vector. */
3745 static rtvec orig_asm_operands_vector;
3747 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
3748 copied an ASM_OPERANDS.
3749 In that case, it is the copied input-operand vector. */
3750 static rtvec copy_asm_operands_vector;
3752 /* Likewise for the constraints vector. */
3753 static rtvec orig_asm_constraints_vector;
3754 static rtvec copy_asm_constraints_vector;
3756 /* Recursively create a new copy of an rtx for copy_insn.
3757 This function differs from copy_rtx in that it handles SCRATCHes and
3758 ASM_OPERANDs properly.
3759 Normally, this function is not used directly; use copy_insn as front end.
3760 However, you could first copy an insn pattern with copy_insn and then use
3761 this function afterwards to properly copy any REG_NOTEs containing
3762 SCRATCHes. */
3765 copy_insn_1 (orig)
3766 register rtx orig;
3768 register rtx copy;
3769 register int i, j;
3770 register RTX_CODE code;
3771 register const char *format_ptr;
3773 code = GET_CODE (orig);
3775 switch (code)
3777 case REG:
3778 case QUEUED:
3779 case CONST_INT:
3780 case CONST_DOUBLE:
3781 case SYMBOL_REF:
3782 case CODE_LABEL:
3783 case PC:
3784 case CC0:
3785 case ADDRESSOF:
3786 return orig;
3788 case SCRATCH:
3789 for (i = 0; i < copy_insn_n_scratches; i++)
3790 if (copy_insn_scratch_in[i] == orig)
3791 return copy_insn_scratch_out[i];
3792 break;
3794 case CONST:
3795 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
3796 a LABEL_REF, it isn't sharable. */
3797 if (GET_CODE (XEXP (orig, 0)) == PLUS
3798 && GET_CODE (XEXP (XEXP (orig, 0), 0)) == SYMBOL_REF
3799 && GET_CODE (XEXP (XEXP (orig, 0), 1)) == CONST_INT)
3800 return orig;
3801 break;
3803 /* A MEM with a constant address is not sharable. The problem is that
3804 the constant address may need to be reloaded. If the mem is shared,
3805 then reloading one copy of this mem will cause all copies to appear
3806 to have been reloaded. */
3808 default:
3809 break;
3812 copy = rtx_alloc (code);
3814 /* Copy the various flags, and other information. We assume that
3815 all fields need copying, and then clear the fields that should
3816 not be copied. That is the sensible default behavior, and forces
3817 us to explicitly document why we are *not* copying a flag. */
3818 memcpy (copy, orig, sizeof (struct rtx_def) - sizeof (rtunion));
3820 /* We do not copy the USED flag, which is used as a mark bit during
3821 walks over the RTL. */
3822 copy->used = 0;
3824 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
3825 if (GET_RTX_CLASS (code) == 'i')
3827 copy->jump = 0;
3828 copy->call = 0;
3829 copy->frame_related = 0;
3832 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
3834 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
3836 copy->fld[i] = orig->fld[i];
3837 switch (*format_ptr++)
3839 case 'e':
3840 if (XEXP (orig, i) != NULL)
3841 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
3842 break;
3844 case 'E':
3845 case 'V':
3846 if (XVEC (orig, i) == orig_asm_constraints_vector)
3847 XVEC (copy, i) = copy_asm_constraints_vector;
3848 else if (XVEC (orig, i) == orig_asm_operands_vector)
3849 XVEC (copy, i) = copy_asm_operands_vector;
3850 else if (XVEC (orig, i) != NULL)
3852 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
3853 for (j = 0; j < XVECLEN (copy, i); j++)
3854 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
3856 break;
3858 case 't':
3859 case 'w':
3860 case 'i':
3861 case 's':
3862 case 'S':
3863 case 'u':
3864 case '0':
3865 /* These are left unchanged. */
3866 break;
3868 default:
3869 abort ();
3873 if (code == SCRATCH)
3875 i = copy_insn_n_scratches++;
3876 if (i >= MAX_RECOG_OPERANDS)
3877 abort ();
3878 copy_insn_scratch_in[i] = orig;
3879 copy_insn_scratch_out[i] = copy;
3881 else if (code == ASM_OPERANDS)
3883 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
3884 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
3885 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
3886 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
3889 return copy;
3892 /* Create a new copy of an rtx.
3893 This function differs from copy_rtx in that it handles SCRATCHes and
3894 ASM_OPERANDs properly.
3895 INSN doesn't really have to be a full INSN; it could be just the
3896 pattern. */
3898 copy_insn (insn)
3899 rtx insn;
3901 copy_insn_n_scratches = 0;
3902 orig_asm_operands_vector = 0;
3903 orig_asm_constraints_vector = 0;
3904 copy_asm_operands_vector = 0;
3905 copy_asm_constraints_vector = 0;
3906 return copy_insn_1 (insn);
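/* Illustrative sketch, not in the original file: copy_insn is the right way
   to duplicate an insn body that may contain SCRATCHes or ASM_OPERANDS,

     PATTERN (new_insn) = copy_insn (PATTERN (old_insn));

   where `new_insn' and `old_insn' are caller-supplied insns; plain copy_rtx
   would not keep the SCRATCH and ASM_OPERANDS sharing consistent.  */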
3909 /* Initialize data structures and variables in this file
3910 before generating rtl for each function. */
3912 void
3913 init_emit ()
3915 struct function *f = cfun;
3917 f->emit = (struct emit_status *) xmalloc (sizeof (struct emit_status));
3918 first_insn = NULL;
3919 last_insn = NULL;
3920 seq_rtl_expr = NULL;
3921 cur_insn_uid = 1;
3922 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
3923 last_linenum = 0;
3924 last_filename = 0;
3925 first_label_num = label_num;
3926 last_label_num = 0;
3927 seq_stack = NULL;
3929 clear_emit_caches ();
3931 /* Init the tables that describe all the pseudo regs. */
3933 f->emit->regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
3935 f->emit->regno_pointer_align
3936 = (unsigned char *) xcalloc (f->emit->regno_pointer_align_length,
3937 sizeof (unsigned char));
3939 regno_reg_rtx
3940 = (rtx *) xcalloc (f->emit->regno_pointer_align_length * sizeof (rtx),
3941 sizeof (rtx));
3943 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
3944 init_virtual_regs (f->emit);
3946 /* Indicate that the virtual registers and stack locations are
3947 all pointers. */
3948 REG_POINTER (stack_pointer_rtx) = 1;
3949 REG_POINTER (frame_pointer_rtx) = 1;
3950 REG_POINTER (hard_frame_pointer_rtx) = 1;
3951 REG_POINTER (arg_pointer_rtx) = 1;
3953 REG_POINTER (virtual_incoming_args_rtx) = 1;
3954 REG_POINTER (virtual_stack_vars_rtx) = 1;
3955 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
3956 REG_POINTER (virtual_outgoing_args_rtx) = 1;
3957 REG_POINTER (virtual_cfa_rtx) = 1;
3959 #ifdef STACK_BOUNDARY
3960 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
3961 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
3962 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
3963 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
3965 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
3966 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
3967 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
3968 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
3969 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
3970 #endif
3972 #ifdef INIT_EXPANDERS
3973 INIT_EXPANDERS;
3974 #endif
3977 /* Mark SS for GC. */
3979 static void
3980 mark_sequence_stack (ss)
3981 struct sequence_stack *ss;
3983 while (ss)
3985 ggc_mark_rtx (ss->first);
3986 ggc_mark_tree (ss->sequence_rtl_expr);
3987 ss = ss->next;
3991 /* Mark ES for GC. */
3993 void
3994 mark_emit_status (es)
3995 struct emit_status *es;
3997 rtx *r;
3998 int i;
4000 if (es == 0)
4001 return;
4003 for (i = es->regno_pointer_align_length, r = es->x_regno_reg_rtx;
4004 i > 0; --i, ++r)
4005 ggc_mark_rtx (*r);
4007 mark_sequence_stack (es->sequence_stack);
4008 ggc_mark_tree (es->sequence_rtl_expr);
4009 ggc_mark_rtx (es->x_first_insn);
4012 /* Create some permanent unique rtl objects shared between all functions.
4013 LINE_NUMBERS is nonzero if line numbers are to be generated. */
4015 void
4016 init_emit_once (line_numbers)
4017 int line_numbers;
4019 int i;
4020 enum machine_mode mode;
4021 enum machine_mode double_mode;
4023 /* Initialize the CONST_INT hash table. */
4024 const_int_htab = htab_create (37, const_int_htab_hash,
4025 const_int_htab_eq, NULL);
4026 ggc_add_root (&const_int_htab, 1, sizeof (const_int_htab),
4027 rtx_htab_mark);
4029 no_line_numbers = ! line_numbers;
4031 /* Compute the word and byte modes. */
4033 byte_mode = VOIDmode;
4034 word_mode = VOIDmode;
4035 double_mode = VOIDmode;
4037 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
4038 mode = GET_MODE_WIDER_MODE (mode))
4040 if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
4041 && byte_mode == VOIDmode)
4042 byte_mode = mode;
4044 if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
4045 && word_mode == VOIDmode)
4046 word_mode = mode;
4049 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
4050 mode = GET_MODE_WIDER_MODE (mode))
4052 if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
4053 && double_mode == VOIDmode)
4054 double_mode = mode;
4057 ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
4059 /* Assign register numbers to the globally defined register rtx.
4060 This must be done at runtime because the register number field
4061 is in a union and some compilers can't initialize unions. */
4063 pc_rtx = gen_rtx (PC, VOIDmode);
4064 cc0_rtx = gen_rtx (CC0, VOIDmode);
4065 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
4066 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
4067 if (hard_frame_pointer_rtx == 0)
4068 hard_frame_pointer_rtx = gen_raw_REG (Pmode,
4069 HARD_FRAME_POINTER_REGNUM);
4070 if (arg_pointer_rtx == 0)
4071 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
4072 virtual_incoming_args_rtx =
4073 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
4074 virtual_stack_vars_rtx =
4075 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
4076 virtual_stack_dynamic_rtx =
4077 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
4078 virtual_outgoing_args_rtx =
4079 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
4080 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
4082 /* These rtx must be roots if GC is enabled. */
4083 ggc_add_rtx_root (global_rtl, GR_MAX);
4085 #ifdef INIT_EXPANDERS
4086 /* This is to initialize {init|mark|free}_machine_status before the first
4087 call to push_function_context_to. This is needed by the Chill front
4088 end which calls push_function_context_to before the first call to
4089 init_function_start. */
4090 INIT_EXPANDERS;
4091 #endif
4093 /* Create the unique rtx's for certain rtx codes and operand values. */
4095 /* Don't use gen_rtx here since gen_rtx in this case
4096 tries to use these variables. */
4097 for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
4098 const_int_rtx[i + MAX_SAVED_CONST_INT] =
4099 gen_rtx_raw_CONST_INT (VOIDmode, i);
4100 ggc_add_rtx_root (const_int_rtx, 2 * MAX_SAVED_CONST_INT + 1);
4102 if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
4103 && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
4104 const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
4105 else
4106 const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
4108 dconst0 = REAL_VALUE_ATOF ("0", double_mode);
4109 dconst1 = REAL_VALUE_ATOF ("1", double_mode);
4110 dconst2 = REAL_VALUE_ATOF ("2", double_mode);
4111 dconstm1 = REAL_VALUE_ATOF ("-1", double_mode);
4113 for (i = 0; i <= 2; i++)
4115 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
4116 mode = GET_MODE_WIDER_MODE (mode))
4118 rtx tem = rtx_alloc (CONST_DOUBLE);
4119 union real_extract u;
4121 memset ((char *) &u, 0, sizeof u); /* Zero any holes in a structure. */
4122 u.d = i == 0 ? dconst0 : i == 1 ? dconst1 : dconst2;
4124 memcpy (&CONST_DOUBLE_LOW (tem), &u, sizeof u);
4125 CONST_DOUBLE_MEM (tem) = cc0_rtx;
4126 CONST_DOUBLE_CHAIN (tem) = NULL_RTX;
4127 PUT_MODE (tem, mode);
4129 const_tiny_rtx[i][(int) mode] = tem;
4132 const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
4134 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
4135 mode = GET_MODE_WIDER_MODE (mode))
4136 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
4138 for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT);
4139 mode != VOIDmode;
4140 mode = GET_MODE_WIDER_MODE (mode))
4141 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
4144 for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
4145 if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
4146 const_tiny_rtx[0][i] = const0_rtx;
4148 const_tiny_rtx[0][(int) BImode] = const0_rtx;
4149 if (STORE_FLAG_VALUE == 1)
4150 const_tiny_rtx[1][(int) BImode] = const1_rtx;
4152 /* For bounded pointers, `&const_tiny_rtx[0][0]' is not the same as
4153 `(rtx *) const_tiny_rtx'. The former has bounds that only cover
4154 `const_tiny_rtx[0]', whereas the latter has bounds that cover all. */
4155 ggc_add_rtx_root ((rtx *) const_tiny_rtx, sizeof const_tiny_rtx / sizeof (rtx));
4156 ggc_add_rtx_root (&const_true_rtx, 1);
4158 #ifdef RETURN_ADDRESS_POINTER_REGNUM
4159 return_address_pointer_rtx
4160 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
4161 #endif
4163 #ifdef STRUCT_VALUE
4164 struct_value_rtx = STRUCT_VALUE;
4165 #else
4166 struct_value_rtx = gen_rtx_REG (Pmode, STRUCT_VALUE_REGNUM);
4167 #endif
4169 #ifdef STRUCT_VALUE_INCOMING
4170 struct_value_incoming_rtx = STRUCT_VALUE_INCOMING;
4171 #else
4172 #ifdef STRUCT_VALUE_INCOMING_REGNUM
4173 struct_value_incoming_rtx
4174 = gen_rtx_REG (Pmode, STRUCT_VALUE_INCOMING_REGNUM);
4175 #else
4176 struct_value_incoming_rtx = struct_value_rtx;
4177 #endif
4178 #endif
4180 #ifdef STATIC_CHAIN_REGNUM
4181 static_chain_rtx = gen_rtx_REG (Pmode, STATIC_CHAIN_REGNUM);
4183 #ifdef STATIC_CHAIN_INCOMING_REGNUM
4184 if (STATIC_CHAIN_INCOMING_REGNUM != STATIC_CHAIN_REGNUM)
4185 static_chain_incoming_rtx
4186 = gen_rtx_REG (Pmode, STATIC_CHAIN_INCOMING_REGNUM);
4187 else
4188 #endif
4189 static_chain_incoming_rtx = static_chain_rtx;
4190 #endif
4192 #ifdef STATIC_CHAIN
4193 static_chain_rtx = STATIC_CHAIN;
4195 #ifdef STATIC_CHAIN_INCOMING
4196 static_chain_incoming_rtx = STATIC_CHAIN_INCOMING;
4197 #else
4198 static_chain_incoming_rtx = static_chain_rtx;
4199 #endif
4200 #endif
4202 #ifdef PIC_OFFSET_TABLE_REGNUM
4203 pic_offset_table_rtx = gen_rtx_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
4204 #endif
4206 ggc_add_rtx_root (&pic_offset_table_rtx, 1);
4207 ggc_add_rtx_root (&struct_value_rtx, 1);
4208 ggc_add_rtx_root (&struct_value_incoming_rtx, 1);
4209 ggc_add_rtx_root (&static_chain_rtx, 1);
4210 ggc_add_rtx_root (&static_chain_incoming_rtx, 1);
4211 ggc_add_rtx_root (&return_address_pointer_rtx, 1);
4214 /* Query and clear/restore no_line_numbers. This is used by the
4215 switch / case handling in stmt.c to give proper line numbers in
4216 warnings about unreachable code. */
4219 force_line_numbers ()
4221 int old = no_line_numbers;
4223 no_line_numbers = 0;
4224 if (old)
4225 force_next_line_note ();
4226 return old;
4229 void
4230 restore_line_number_status (old_value)
4231 int old_value;
4233 no_line_numbers = old_value;