PR C++/689
[official-gcc.git] / gcc / emit-rtl.c
bloba000551dc2ee16aa53325cb5056c9444ab66b132
1 /* Emit RTL for the GNU C-Compiler expander.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
23 /* Middle-to-low level generation of rtx code and insns.
25 This file contains the functions `gen_rtx', `gen_reg_rtx'
26 and `gen_label_rtx' that are the usual ways of creating rtl
27 expressions for most purposes.
29 It also has the functions for creating insns and linking
30 them in the doubly-linked chain.
32 The patterns of the insns are created by machine-dependent
33 routines in insn-emit.c, which is generated automatically from
34 the machine description. These routines use `gen_rtx' to make
35 the individual rtx's of the pattern; what is machine dependent
36 is the kind of rtx's they make and what arguments they use. */
38 #include "config.h"
39 #include "system.h"
40 #include "coretypes.h"
41 #include "tm.h"
42 #include "toplev.h"
43 #include "rtl.h"
44 #include "tree.h"
45 #include "tm_p.h"
46 #include "flags.h"
47 #include "function.h"
48 #include "expr.h"
49 #include "regs.h"
50 #include "hard-reg-set.h"
51 #include "hashtab.h"
52 #include "insn-config.h"
53 #include "recog.h"
54 #include "real.h"
55 #include "bitmap.h"
56 #include "basic-block.h"
57 #include "ggc.h"
58 #include "debug.h"
59 #include "langhooks.h"
61 /* Commonly used modes. */
63 enum machine_mode byte_mode; /* Mode whose width is BITS_PER_UNIT. */
64 enum machine_mode word_mode; /* Mode whose width is BITS_PER_WORD. */
65 enum machine_mode double_mode; /* Mode whose width is DOUBLE_TYPE_SIZE. */
66 enum machine_mode ptr_mode; /* Mode whose width is POINTER_SIZE. */
69 /* This is *not* reset after each function. It gives each CODE_LABEL
70 in the entire compilation a unique label number. */
72 static GTY(()) int label_num = 1;
74 /* Highest label number in current function.
75 Zero means use the value of label_num instead.
76 This is nonzero only when belatedly compiling an inline function. */
78 static int last_label_num;
80 /* Value label_num had when set_new_first_and_last_label_number was called.
81 If label_num has not changed since then, last_label_num is valid. */
83 static int base_label_num;
85 /* Nonzero means do not generate NOTEs for source line numbers. */
87 static int no_line_numbers;
89 /* Commonly used rtx's, so that we only need space for one copy.
90 These are initialized once for the entire compilation.
91 All of these are unique; no other rtx-object will be equal to any
92 of these. */
94 rtx global_rtl[GR_MAX];
96 /* Commonly used RTL for hard registers. These objects are not necessarily
97 unique, so we allocate them separately from global_rtl. They are
98 initialized once per compilation unit, then copied into regno_reg_rtx
99 at the beginning of each function. */
100 static GTY(()) rtx static_regno_reg_rtx[FIRST_PSEUDO_REGISTER];
102 /* We record floating-point CONST_DOUBLEs in each floating-point mode for
103 the values of 0, 1, and 2. For the integer entries and VOIDmode, we
104 record a copy of const[012]_rtx. */
106 rtx const_tiny_rtx[3][(int) MAX_MACHINE_MODE];
108 rtx const_true_rtx;
110 REAL_VALUE_TYPE dconst0;
111 REAL_VALUE_TYPE dconst1;
112 REAL_VALUE_TYPE dconst2;
113 REAL_VALUE_TYPE dconstm1;
114 REAL_VALUE_TYPE dconstm2;
115 REAL_VALUE_TYPE dconsthalf;
117 /* All references to the following fixed hard registers go through
118 these unique rtl objects. On machines where the frame-pointer and
119 arg-pointer are the same register, they use the same unique object.
121 After register allocation, other rtl objects which used to be pseudo-regs
122 may be clobbered to refer to the frame-pointer register.
123 But references that were originally to the frame-pointer can be
124 distinguished from the others because they contain frame_pointer_rtx.
126 When to use frame_pointer_rtx and hard_frame_pointer_rtx is a little
127 tricky: until register elimination has taken place hard_frame_pointer_rtx
128 should be used if it is being set, and frame_pointer_rtx otherwise. After
129 register elimination hard_frame_pointer_rtx should always be used.
130 On machines where the two registers are same (most) then these are the
131 same.
133 In an inline procedure, the stack and frame pointer rtxs may not be
134 used for anything else. */
135 rtx struct_value_rtx; /* (REG:Pmode STRUCT_VALUE_REGNUM) */
136 rtx struct_value_incoming_rtx; /* (REG:Pmode STRUCT_VALUE_INCOMING_REGNUM) */
137 rtx static_chain_rtx; /* (REG:Pmode STATIC_CHAIN_REGNUM) */
138 rtx static_chain_incoming_rtx; /* (REG:Pmode STATIC_CHAIN_INCOMING_REGNUM) */
139 rtx pic_offset_table_rtx; /* (REG:Pmode PIC_OFFSET_TABLE_REGNUM) */
141 /* This is used to implement __builtin_return_address for some machines.
142 See for instance the MIPS port. */
143 rtx return_address_pointer_rtx; /* (REG:Pmode RETURN_ADDRESS_POINTER_REGNUM) */
145 /* We make one copy of (const_int C) where C is in
146 [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
147 to save space during the compilation and simplify comparisons of
148 integers. */
150 rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
152 /* A hash table storing CONST_INTs whose absolute value is greater
153 than MAX_SAVED_CONST_INT. */
155 static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
156 htab_t const_int_htab;
158 /* A hash table storing memory attribute structures. */
159 static GTY ((if_marked ("ggc_marked_p"), param_is (struct mem_attrs)))
160 htab_t mem_attrs_htab;
162 /* A hash table storing register attribute structures. */
163 static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs)))
164 htab_t reg_attrs_htab;
166 /* A hash table storing all CONST_DOUBLEs. */
167 static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
168 htab_t const_double_htab;
170 #define first_insn (cfun->emit->x_first_insn)
171 #define last_insn (cfun->emit->x_last_insn)
172 #define cur_insn_uid (cfun->emit->x_cur_insn_uid)
173 #define last_linenum (cfun->emit->x_last_linenum)
174 #define last_filename (cfun->emit->x_last_filename)
175 #define first_label_num (cfun->emit->x_first_label_num)
177 static rtx make_jump_insn_raw PARAMS ((rtx));
178 static rtx make_call_insn_raw PARAMS ((rtx));
179 static rtx find_line_note PARAMS ((rtx));
180 static rtx change_address_1 PARAMS ((rtx, enum machine_mode, rtx,
181 int));
182 static void unshare_all_rtl_1 PARAMS ((rtx));
183 static void unshare_all_decls PARAMS ((tree));
184 static void reset_used_decls PARAMS ((tree));
185 static void mark_label_nuses PARAMS ((rtx));
186 static hashval_t const_int_htab_hash PARAMS ((const void *));
187 static int const_int_htab_eq PARAMS ((const void *,
188 const void *));
189 static hashval_t const_double_htab_hash PARAMS ((const void *));
190 static int const_double_htab_eq PARAMS ((const void *,
191 const void *));
192 static rtx lookup_const_double PARAMS ((rtx));
193 static hashval_t mem_attrs_htab_hash PARAMS ((const void *));
194 static int mem_attrs_htab_eq PARAMS ((const void *,
195 const void *));
196 static mem_attrs *get_mem_attrs PARAMS ((HOST_WIDE_INT, tree, rtx,
197 rtx, unsigned int,
198 enum machine_mode));
199 static hashval_t reg_attrs_htab_hash PARAMS ((const void *));
200 static int reg_attrs_htab_eq PARAMS ((const void *,
201 const void *));
202 static reg_attrs *get_reg_attrs PARAMS ((tree, int));
203 static tree component_ref_for_mem_expr PARAMS ((tree));
204 static rtx gen_const_vector_0 PARAMS ((enum machine_mode));
206 /* Probability of the conditional branch currently proceeded by try_split.
207 Set to -1 otherwise. */
208 int split_branch_probability = -1;
210 /* Returns a hash code for X (which is a really a CONST_INT). */
212 static hashval_t
213 const_int_htab_hash (x)
214 const void *x;
216 return (hashval_t) INTVAL ((struct rtx_def *) x);
219 /* Returns nonzero if the value represented by X (which is really a
220 CONST_INT) is the same as that given by Y (which is really a
221 HOST_WIDE_INT *). */
223 static int
224 const_int_htab_eq (x, y)
225 const void *x;
226 const void *y;
228 return (INTVAL ((rtx) x) == *((const HOST_WIDE_INT *) y));
231 /* Returns a hash code for X (which is really a CONST_DOUBLE). */
232 static hashval_t
233 const_double_htab_hash (x)
234 const void *x;
236 rtx value = (rtx) x;
237 hashval_t h;
239 if (GET_MODE (value) == VOIDmode)
240 h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
241 else
243 h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
244 /* MODE is used in the comparison, so it should be in the hash. */
245 h ^= GET_MODE (value);
247 return h;
250 /* Returns nonzero if the value represented by X (really a ...)
251 is the same as that represented by Y (really a ...) */
252 static int
253 const_double_htab_eq (x, y)
254 const void *x;
255 const void *y;
257 rtx a = (rtx)x, b = (rtx)y;
259 if (GET_MODE (a) != GET_MODE (b))
260 return 0;
261 if (GET_MODE (a) == VOIDmode)
262 return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
263 && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
264 else
265 return real_identical (CONST_DOUBLE_REAL_VALUE (a),
266 CONST_DOUBLE_REAL_VALUE (b));
269 /* Returns a hash code for X (which is a really a mem_attrs *). */
271 static hashval_t
272 mem_attrs_htab_hash (x)
273 const void *x;
275 mem_attrs *p = (mem_attrs *) x;
277 return (p->alias ^ (p->align * 1000)
278 ^ ((p->offset ? INTVAL (p->offset) : 0) * 50000)
279 ^ ((p->size ? INTVAL (p->size) : 0) * 2500000)
280 ^ (size_t) p->expr);
283 /* Returns nonzero if the value represented by X (which is really a
284 mem_attrs *) is the same as that given by Y (which is also really a
285 mem_attrs *). */
287 static int
288 mem_attrs_htab_eq (x, y)
289 const void *x;
290 const void *y;
292 mem_attrs *p = (mem_attrs *) x;
293 mem_attrs *q = (mem_attrs *) y;
295 return (p->alias == q->alias && p->expr == q->expr && p->offset == q->offset
296 && p->size == q->size && p->align == q->align);
299 /* Allocate a new mem_attrs structure and insert it into the hash table if
300 one identical to it is not already in the table. We are doing this for
301 MEM of mode MODE. */
303 static mem_attrs *
304 get_mem_attrs (alias, expr, offset, size, align, mode)
305 HOST_WIDE_INT alias;
306 tree expr;
307 rtx offset;
308 rtx size;
309 unsigned int align;
310 enum machine_mode mode;
312 mem_attrs attrs;
313 void **slot;
315 /* If everything is the default, we can just return zero.
316 This must match what the corresponding MEM_* macros return when the
317 field is not present. */
318 if (alias == 0 && expr == 0 && offset == 0
319 && (size == 0
320 || (mode != BLKmode && GET_MODE_SIZE (mode) == INTVAL (size)))
321 && (STRICT_ALIGNMENT && mode != BLKmode
322 ? align == GET_MODE_ALIGNMENT (mode) : align == BITS_PER_UNIT))
323 return 0;
325 attrs.alias = alias;
326 attrs.expr = expr;
327 attrs.offset = offset;
328 attrs.size = size;
329 attrs.align = align;
331 slot = htab_find_slot (mem_attrs_htab, &attrs, INSERT);
332 if (*slot == 0)
334 *slot = ggc_alloc (sizeof (mem_attrs));
335 memcpy (*slot, &attrs, sizeof (mem_attrs));
338 return *slot;
341 /* Returns a hash code for X (which is a really a reg_attrs *). */
343 static hashval_t
344 reg_attrs_htab_hash (x)
345 const void *x;
347 reg_attrs *p = (reg_attrs *) x;
349 return ((p->offset * 1000) ^ (long) p->decl);
352 /* Returns non-zero if the value represented by X (which is really a
353 reg_attrs *) is the same as that given by Y (which is also really a
354 reg_attrs *). */
356 static int
357 reg_attrs_htab_eq (x, y)
358 const void *x;
359 const void *y;
361 reg_attrs *p = (reg_attrs *) x;
362 reg_attrs *q = (reg_attrs *) y;
364 return (p->decl == q->decl && p->offset == q->offset);
366 /* Allocate a new reg_attrs structure and insert it into the hash table if
367 one identical to it is not already in the table. We are doing this for
368 MEM of mode MODE. */
370 static reg_attrs *
371 get_reg_attrs (decl, offset)
372 tree decl;
373 int offset;
375 reg_attrs attrs;
376 void **slot;
378 /* If everything is the default, we can just return zero. */
379 if (decl == 0 && offset == 0)
380 return 0;
382 attrs.decl = decl;
383 attrs.offset = offset;
385 slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
386 if (*slot == 0)
388 *slot = ggc_alloc (sizeof (reg_attrs));
389 memcpy (*slot, &attrs, sizeof (reg_attrs));
392 return *slot;
395 /* Generate a new REG rtx. Make sure ORIGINAL_REGNO is set properly, and
396 don't attempt to share with the various global pieces of rtl (such as
397 frame_pointer_rtx). */
400 gen_raw_REG (mode, regno)
401 enum machine_mode mode;
402 int regno;
404 rtx x = gen_rtx_raw_REG (mode, regno);
405 ORIGINAL_REGNO (x) = regno;
406 return x;
409 /* There are some RTL codes that require special attention; the generation
410 functions do the raw handling. If you add to this list, modify
411 special_rtx in gengenrtl.c as well. */
414 gen_rtx_CONST_INT (mode, arg)
415 enum machine_mode mode ATTRIBUTE_UNUSED;
416 HOST_WIDE_INT arg;
418 void **slot;
420 if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
421 return const_int_rtx[arg + MAX_SAVED_CONST_INT];
423 #if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
424 if (const_true_rtx && arg == STORE_FLAG_VALUE)
425 return const_true_rtx;
426 #endif
428 /* Look up the CONST_INT in the hash table. */
429 slot = htab_find_slot_with_hash (const_int_htab, &arg,
430 (hashval_t) arg, INSERT);
431 if (*slot == 0)
432 *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);
434 return (rtx) *slot;
438 gen_int_mode (c, mode)
439 HOST_WIDE_INT c;
440 enum machine_mode mode;
442 return GEN_INT (trunc_int_for_mode (c, mode));
445 /* CONST_DOUBLEs might be created from pairs of integers, or from
446 REAL_VALUE_TYPEs. Also, their length is known only at run time,
447 so we cannot use gen_rtx_raw_CONST_DOUBLE. */
449 /* Determine whether REAL, a CONST_DOUBLE, already exists in the
450 hash table. If so, return its counterpart; otherwise add it
451 to the hash table and return it. */
452 static rtx
453 lookup_const_double (real)
454 rtx real;
456 void **slot = htab_find_slot (const_double_htab, real, INSERT);
457 if (*slot == 0)
458 *slot = real;
460 return (rtx) *slot;
463 /* Return a CONST_DOUBLE rtx for a floating-point value specified by
464 VALUE in mode MODE. */
466 const_double_from_real_value (value, mode)
467 REAL_VALUE_TYPE value;
468 enum machine_mode mode;
470 rtx real = rtx_alloc (CONST_DOUBLE);
471 PUT_MODE (real, mode);
473 memcpy (&CONST_DOUBLE_LOW (real), &value, sizeof (REAL_VALUE_TYPE));
475 return lookup_const_double (real);
478 /* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
479 of ints: I0 is the low-order word and I1 is the high-order word.
480 Do not use this routine for non-integer modes; convert to
481 REAL_VALUE_TYPE and use CONST_DOUBLE_FROM_REAL_VALUE. */
484 immed_double_const (i0, i1, mode)
485 HOST_WIDE_INT i0, i1;
486 enum machine_mode mode;
488 rtx value;
489 unsigned int i;
491 if (mode != VOIDmode)
493 int width;
494 if (GET_MODE_CLASS (mode) != MODE_INT
495 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT
496 /* We can get a 0 for an error mark. */
497 && GET_MODE_CLASS (mode) != MODE_VECTOR_INT
498 && GET_MODE_CLASS (mode) != MODE_VECTOR_FLOAT)
499 abort ();
501 /* We clear out all bits that don't belong in MODE, unless they and
502 our sign bit are all one. So we get either a reasonable negative
503 value or a reasonable unsigned value for this mode. */
504 width = GET_MODE_BITSIZE (mode);
505 if (width < HOST_BITS_PER_WIDE_INT
506 && ((i0 & ((HOST_WIDE_INT) (-1) << (width - 1)))
507 != ((HOST_WIDE_INT) (-1) << (width - 1))))
508 i0 &= ((HOST_WIDE_INT) 1 << width) - 1, i1 = 0;
509 else if (width == HOST_BITS_PER_WIDE_INT
510 && ! (i1 == ~0 && i0 < 0))
511 i1 = 0;
512 else if (width > 2 * HOST_BITS_PER_WIDE_INT)
513 /* We cannot represent this value as a constant. */
514 abort ();
516 /* If this would be an entire word for the target, but is not for
517 the host, then sign-extend on the host so that the number will
518 look the same way on the host that it would on the target.
520 For example, when building a 64 bit alpha hosted 32 bit sparc
521 targeted compiler, then we want the 32 bit unsigned value -1 to be
522 represented as a 64 bit value -1, and not as 0x00000000ffffffff.
523 The latter confuses the sparc backend. */
525 if (width < HOST_BITS_PER_WIDE_INT
526 && (i0 & ((HOST_WIDE_INT) 1 << (width - 1))))
527 i0 |= ((HOST_WIDE_INT) (-1) << width);
529 /* If MODE fits within HOST_BITS_PER_WIDE_INT, always use a
530 CONST_INT.
532 ??? Strictly speaking, this is wrong if we create a CONST_INT for
533 a large unsigned constant with the size of MODE being
534 HOST_BITS_PER_WIDE_INT and later try to interpret that constant
535 in a wider mode. In that case we will mis-interpret it as a
536 negative number.
538 Unfortunately, the only alternative is to make a CONST_DOUBLE for
539 any constant in any mode if it is an unsigned constant larger
540 than the maximum signed integer in an int on the host. However,
541 doing this will break everyone that always expects to see a
542 CONST_INT for SImode and smaller.
544 We have always been making CONST_INTs in this case, so nothing
545 new is being broken. */
547 if (width <= HOST_BITS_PER_WIDE_INT)
548 i1 = (i0 < 0) ? ~(HOST_WIDE_INT) 0 : 0;
551 /* If this integer fits in one word, return a CONST_INT. */
552 if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
553 return GEN_INT (i0);
555 /* We use VOIDmode for integers. */
556 value = rtx_alloc (CONST_DOUBLE);
557 PUT_MODE (value, VOIDmode);
559 CONST_DOUBLE_LOW (value) = i0;
560 CONST_DOUBLE_HIGH (value) = i1;
562 for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
563 XWINT (value, i) = 0;
565 return lookup_const_double (value);
569 gen_rtx_REG (mode, regno)
570 enum machine_mode mode;
571 unsigned int regno;
573 /* In case the MD file explicitly references the frame pointer, have
574 all such references point to the same frame pointer. This is
575 used during frame pointer elimination to distinguish the explicit
576 references to these registers from pseudos that happened to be
577 assigned to them.
579 If we have eliminated the frame pointer or arg pointer, we will
580 be using it as a normal register, for example as a spill
581 register. In such cases, we might be accessing it in a mode that
582 is not Pmode and therefore cannot use the pre-allocated rtx.
584 Also don't do this when we are making new REGs in reload, since
585 we don't want to get confused with the real pointers. */
587 if (mode == Pmode && !reload_in_progress)
589 if (regno == FRAME_POINTER_REGNUM
590 && (!reload_completed || frame_pointer_needed))
591 return frame_pointer_rtx;
592 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
593 if (regno == HARD_FRAME_POINTER_REGNUM
594 && (!reload_completed || frame_pointer_needed))
595 return hard_frame_pointer_rtx;
596 #endif
597 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && HARD_FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
598 if (regno == ARG_POINTER_REGNUM)
599 return arg_pointer_rtx;
600 #endif
601 #ifdef RETURN_ADDRESS_POINTER_REGNUM
602 if (regno == RETURN_ADDRESS_POINTER_REGNUM)
603 return return_address_pointer_rtx;
604 #endif
605 if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
606 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
607 return pic_offset_table_rtx;
608 if (regno == STACK_POINTER_REGNUM)
609 return stack_pointer_rtx;
612 #if 0
613 /* If the per-function register table has been set up, try to re-use
614 an existing entry in that table to avoid useless generation of RTL.
616 This code is disabled for now until we can fix the various backends
617 which depend on having non-shared hard registers in some cases. Long
618 term we want to re-enable this code as it can significantly cut down
619 on the amount of useless RTL that gets generated.
621 We'll also need to fix some code that runs after reload that wants to
622 set ORIGINAL_REGNO. */
624 if (cfun
625 && cfun->emit
626 && regno_reg_rtx
627 && regno < FIRST_PSEUDO_REGISTER
628 && reg_raw_mode[regno] == mode)
629 return regno_reg_rtx[regno];
630 #endif
632 return gen_raw_REG (mode, regno);
636 gen_rtx_MEM (mode, addr)
637 enum machine_mode mode;
638 rtx addr;
640 rtx rt = gen_rtx_raw_MEM (mode, addr);
642 /* This field is not cleared by the mere allocation of the rtx, so
643 we clear it here. */
644 MEM_ATTRS (rt) = 0;
646 return rt;
650 gen_rtx_SUBREG (mode, reg, offset)
651 enum machine_mode mode;
652 rtx reg;
653 int offset;
655 /* This is the most common failure type.
656 Catch it early so we can see who does it. */
657 if ((offset % GET_MODE_SIZE (mode)) != 0)
658 abort ();
660 /* This check isn't usable right now because combine will
661 throw arbitrary crap like a CALL into a SUBREG in
662 gen_lowpart_for_combine so we must just eat it. */
663 #if 0
664 /* Check for this too. */
665 if (offset >= GET_MODE_SIZE (GET_MODE (reg)))
666 abort ();
667 #endif
668 return gen_rtx_raw_SUBREG (mode, reg, offset);
671 /* Generate a SUBREG representing the least-significant part of REG if MODE
672 is smaller than mode of REG, otherwise paradoxical SUBREG. */
675 gen_lowpart_SUBREG (mode, reg)
676 enum machine_mode mode;
677 rtx reg;
679 enum machine_mode inmode;
681 inmode = GET_MODE (reg);
682 if (inmode == VOIDmode)
683 inmode = mode;
684 return gen_rtx_SUBREG (mode, reg,
685 subreg_lowpart_offset (mode, inmode));
688 /* rtx gen_rtx (code, mode, [element1, ..., elementn])
690 ** This routine generates an RTX of the size specified by
691 ** <code>, which is an RTX code. The RTX structure is initialized
692 ** from the arguments <element1> through <elementn>, which are
693 ** interpreted according to the specific RTX type's format. The
694 ** special machine mode associated with the rtx (if any) is specified
695 ** in <mode>.
697 ** gen_rtx can be invoked in a way which resembles the lisp-like
698 ** rtx it will generate. For example, the following rtx structure:
700 ** (plus:QI (mem:QI (reg:SI 1))
701 ** (mem:QI (plusw:SI (reg:SI 2) (reg:SI 3))))
703 ** ...would be generated by the following C code:
705 ** gen_rtx (PLUS, QImode,
706 ** gen_rtx (MEM, QImode,
707 ** gen_rtx (REG, SImode, 1)),
708 ** gen_rtx (MEM, QImode,
709 ** gen_rtx (PLUS, SImode,
710 ** gen_rtx (REG, SImode, 2),
711 ** gen_rtx (REG, SImode, 3)))),
714 /*VARARGS2*/
716 gen_rtx VPARAMS ((enum rtx_code code, enum machine_mode mode, ...))
718 int i; /* Array indices... */
719 const char *fmt; /* Current rtx's format... */
720 rtx rt_val; /* RTX to return to caller... */
722 VA_OPEN (p, mode);
723 VA_FIXEDARG (p, enum rtx_code, code);
724 VA_FIXEDARG (p, enum machine_mode, mode);
726 switch (code)
728 case CONST_INT:
729 rt_val = gen_rtx_CONST_INT (mode, va_arg (p, HOST_WIDE_INT));
730 break;
732 case CONST_DOUBLE:
734 HOST_WIDE_INT arg0 = va_arg (p, HOST_WIDE_INT);
735 HOST_WIDE_INT arg1 = va_arg (p, HOST_WIDE_INT);
737 rt_val = immed_double_const (arg0, arg1, mode);
739 break;
741 case REG:
742 rt_val = gen_rtx_REG (mode, va_arg (p, int));
743 break;
745 case MEM:
746 rt_val = gen_rtx_MEM (mode, va_arg (p, rtx));
747 break;
749 default:
750 rt_val = rtx_alloc (code); /* Allocate the storage space. */
751 rt_val->mode = mode; /* Store the machine mode... */
753 fmt = GET_RTX_FORMAT (code); /* Find the right format... */
754 for (i = 0; i < GET_RTX_LENGTH (code); i++)
756 switch (*fmt++)
758 case '0': /* Field with unknown use. Zero it. */
759 X0EXP (rt_val, i) = NULL_RTX;
760 break;
762 case 'i': /* An integer? */
763 XINT (rt_val, i) = va_arg (p, int);
764 break;
766 case 'w': /* A wide integer? */
767 XWINT (rt_val, i) = va_arg (p, HOST_WIDE_INT);
768 break;
770 case 's': /* A string? */
771 XSTR (rt_val, i) = va_arg (p, char *);
772 break;
774 case 'e': /* An expression? */
775 case 'u': /* An insn? Same except when printing. */
776 XEXP (rt_val, i) = va_arg (p, rtx);
777 break;
779 case 'E': /* An RTX vector? */
780 XVEC (rt_val, i) = va_arg (p, rtvec);
781 break;
783 case 'b': /* A bitmap? */
784 XBITMAP (rt_val, i) = va_arg (p, bitmap);
785 break;
787 case 't': /* A tree? */
788 XTREE (rt_val, i) = va_arg (p, tree);
789 break;
791 default:
792 abort ();
795 break;
798 VA_CLOSE (p);
799 return rt_val;
802 /* gen_rtvec (n, [rt1, ..., rtn])
804 ** This routine creates an rtvec and stores within it the
805 ** pointers to rtx's which are its arguments.
808 /*VARARGS1*/
809 rtvec
810 gen_rtvec VPARAMS ((int n, ...))
812 int i, save_n;
813 rtx *vector;
815 VA_OPEN (p, n);
816 VA_FIXEDARG (p, int, n);
818 if (n == 0)
819 return NULL_RTVEC; /* Don't allocate an empty rtvec... */
821 vector = (rtx *) alloca (n * sizeof (rtx));
823 for (i = 0; i < n; i++)
824 vector[i] = va_arg (p, rtx);
826 /* The definition of VA_* in K&R C causes `n' to go out of scope. */
827 save_n = n;
828 VA_CLOSE (p);
830 return gen_rtvec_v (save_n, vector);
833 rtvec
834 gen_rtvec_v (n, argp)
835 int n;
836 rtx *argp;
838 int i;
839 rtvec rt_val;
841 if (n == 0)
842 return NULL_RTVEC; /* Don't allocate an empty rtvec... */
844 rt_val = rtvec_alloc (n); /* Allocate an rtvec... */
846 for (i = 0; i < n; i++)
847 rt_val->elem[i] = *argp++;
849 return rt_val;
852 /* Generate a REG rtx for a new pseudo register of mode MODE.
853 This pseudo is assigned the next sequential register number. */
856 gen_reg_rtx (mode)
857 enum machine_mode mode;
859 struct function *f = cfun;
860 rtx val;
862 /* Don't let anything called after initial flow analysis create new
863 registers. */
864 if (no_new_pseudos)
865 abort ();
867 if (generating_concat_p
868 && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
869 || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
871 /* For complex modes, don't make a single pseudo.
872 Instead, make a CONCAT of two pseudos.
873 This allows noncontiguous allocation of the real and imaginary parts,
874 which makes much better code. Besides, allocating DCmode
875 pseudos overstrains reload on some machines like the 386. */
876 rtx realpart, imagpart;
877 enum machine_mode partmode = GET_MODE_INNER (mode);
879 realpart = gen_reg_rtx (partmode);
880 imagpart = gen_reg_rtx (partmode);
881 return gen_rtx_CONCAT (mode, realpart, imagpart);
884 /* Make sure regno_pointer_align, and regno_reg_rtx are large
885 enough to have an element for this pseudo reg number. */
887 if (reg_rtx_no == f->emit->regno_pointer_align_length)
889 int old_size = f->emit->regno_pointer_align_length;
890 char *new;
891 rtx *new1;
893 new = ggc_realloc (f->emit->regno_pointer_align, old_size * 2);
894 memset (new + old_size, 0, old_size);
895 f->emit->regno_pointer_align = (unsigned char *) new;
897 new1 = (rtx *) ggc_realloc (f->emit->x_regno_reg_rtx,
898 old_size * 2 * sizeof (rtx));
899 memset (new1 + old_size, 0, old_size * sizeof (rtx));
900 regno_reg_rtx = new1;
902 f->emit->regno_pointer_align_length = old_size * 2;
905 val = gen_raw_REG (mode, reg_rtx_no);
906 regno_reg_rtx[reg_rtx_no++] = val;
907 return val;
910 /* Generate an register with same attributes as REG,
911 but offsetted by OFFSET. */
914 gen_rtx_REG_offset (reg, mode, regno, offset)
915 enum machine_mode mode;
916 unsigned int regno;
917 int offset;
918 rtx reg;
920 rtx new = gen_rtx_REG (mode, regno);
921 REG_ATTRS (new) = get_reg_attrs (REG_EXPR (reg),
922 REG_OFFSET (reg) + offset);
923 return new;
926 /* Set the decl for MEM to DECL. */
928 void
929 set_reg_attrs_from_mem (reg, mem)
930 rtx reg;
931 rtx mem;
933 if (MEM_OFFSET (mem) && GET_CODE (MEM_OFFSET (mem)) == CONST_INT)
934 REG_ATTRS (reg)
935 = get_reg_attrs (MEM_EXPR (mem), INTVAL (MEM_OFFSET (mem)));
938 /* Set the register attributes for registers contained in PARM_RTX.
939 Use needed values from memory attributes of MEM. */
941 void
942 set_reg_attrs_for_parm (parm_rtx, mem)
943 rtx parm_rtx;
944 rtx mem;
946 if (GET_CODE (parm_rtx) == REG)
947 set_reg_attrs_from_mem (parm_rtx, mem);
948 else if (GET_CODE (parm_rtx) == PARALLEL)
950 /* Check for a NULL entry in the first slot, used to indicate that the
951 parameter goes both on the stack and in registers. */
952 int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
953 for (; i < XVECLEN (parm_rtx, 0); i++)
955 rtx x = XVECEXP (parm_rtx, 0, i);
956 if (GET_CODE (XEXP (x, 0)) == REG)
957 REG_ATTRS (XEXP (x, 0))
958 = get_reg_attrs (MEM_EXPR (mem),
959 INTVAL (XEXP (x, 1)));
964 /* Assign the RTX X to declaration T. */
965 void
966 set_decl_rtl (t, x)
967 tree t;
968 rtx x;
970 DECL_CHECK (t)->decl.rtl = x;
972 if (!x)
973 return;
974 /* For register, we maitain the reverse information too. */
975 if (GET_CODE (x) == REG)
976 REG_ATTRS (x) = get_reg_attrs (t, 0);
977 else if (GET_CODE (x) == SUBREG)
978 REG_ATTRS (SUBREG_REG (x))
979 = get_reg_attrs (t, -SUBREG_BYTE (x));
980 if (GET_CODE (x) == CONCAT)
982 if (REG_P (XEXP (x, 0)))
983 REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
984 if (REG_P (XEXP (x, 1)))
985 REG_ATTRS (XEXP (x, 1))
986 = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
988 if (GET_CODE (x) == PARALLEL)
990 int i;
991 for (i = 0; i < XVECLEN (x, 0); i++)
993 rtx y = XVECEXP (x, 0, i);
994 if (REG_P (XEXP (y, 0)))
995 REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
1000 /* Identify REG (which may be a CONCAT) as a user register. */
1002 void
1003 mark_user_reg (reg)
1004 rtx reg;
1006 if (GET_CODE (reg) == CONCAT)
1008 REG_USERVAR_P (XEXP (reg, 0)) = 1;
1009 REG_USERVAR_P (XEXP (reg, 1)) = 1;
1011 else if (GET_CODE (reg) == REG)
1012 REG_USERVAR_P (reg) = 1;
1013 else
1014 abort ();
1017 /* Identify REG as a probable pointer register and show its alignment
1018 as ALIGN, if nonzero. */
1020 void
1021 mark_reg_pointer (reg, align)
1022 rtx reg;
1023 int align;
1025 if (! REG_POINTER (reg))
1027 REG_POINTER (reg) = 1;
1029 if (align)
1030 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1032 else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
1033 /* We can no-longer be sure just how aligned this pointer is */
1034 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1037 /* Return 1 plus largest pseudo reg number used in the current function. */
1040 max_reg_num ()
1042 return reg_rtx_no;
1045 /* Return 1 + the largest label number used so far in the current function. */
1048 max_label_num ()
1050 if (last_label_num && label_num == base_label_num)
1051 return last_label_num;
1052 return label_num;
1055 /* Return first label number used in this function (if any were used). */
1058 get_first_label_num ()
1060 return first_label_num;
1063 /* Return the final regno of X, which is a SUBREG of a hard
1064 register. */
1066 subreg_hard_regno (x, check_mode)
1067 rtx x;
1068 int check_mode;
1070 enum machine_mode mode = GET_MODE (x);
1071 unsigned int byte_offset, base_regno, final_regno;
1072 rtx reg = SUBREG_REG (x);
1074 /* This is where we attempt to catch illegal subregs
1075 created by the compiler. */
1076 if (GET_CODE (x) != SUBREG
1077 || GET_CODE (reg) != REG)
1078 abort ();
1079 base_regno = REGNO (reg);
1080 if (base_regno >= FIRST_PSEUDO_REGISTER)
1081 abort ();
1082 if (check_mode && ! HARD_REGNO_MODE_OK (base_regno, GET_MODE (reg)))
1083 abort ();
1084 #ifdef ENABLE_CHECKING
1085 if (!subreg_offset_representable_p (REGNO (reg), GET_MODE (reg),
1086 SUBREG_BYTE (x), mode))
1087 abort ();
1088 #endif
1089 /* Catch non-congruent offsets too. */
1090 byte_offset = SUBREG_BYTE (x);
1091 if ((byte_offset % GET_MODE_SIZE (mode)) != 0)
1092 abort ();
1094 final_regno = subreg_regno (x);
1096 return final_regno;
1099 /* Return a value representing some low-order bits of X, where the number
1100 of low-order bits is given by MODE. Note that no conversion is done
1101 between floating-point and fixed-point values, rather, the bit
1102 representation is returned.
1104 This function handles the cases in common between gen_lowpart, below,
1105 and two variants in cse.c and combine.c. These are the cases that can
1106 be safely handled at all points in the compilation.
1108 If this is not a case we can handle, return 0. */
1111 gen_lowpart_common (mode, x)
1112 enum machine_mode mode;
1113 rtx x;
1115 int msize = GET_MODE_SIZE (mode);
1116 int xsize = GET_MODE_SIZE (GET_MODE (x));
1117 int offset = 0;
1119 if (GET_MODE (x) == mode)
1120 return x;
1122 /* MODE must occupy no more words than the mode of X. */
1123 if (GET_MODE (x) != VOIDmode
1124 && ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
1125 > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
1126 return 0;
1128 /* Don't allow generating paradoxical FLOAT_MODE subregs. */
1129 if (GET_MODE_CLASS (mode) == MODE_FLOAT
1130 && GET_MODE (x) != VOIDmode && msize > xsize)
1131 return 0;
1133 offset = subreg_lowpart_offset (mode, GET_MODE (x));
1135 if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
1136 && (GET_MODE_CLASS (mode) == MODE_INT
1137 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
1139 /* If we are getting the low-order part of something that has been
1140 sign- or zero-extended, we can either just use the object being
1141 extended or make a narrower extension. If we want an even smaller
1142 piece than the size of the object being extended, call ourselves
1143 recursively.
1145 This case is used mostly by combine and cse. */
1147 if (GET_MODE (XEXP (x, 0)) == mode)
1148 return XEXP (x, 0);
1149 else if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
1150 return gen_lowpart_common (mode, XEXP (x, 0));
1151 else if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (x)))
1152 return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
1154 else if (GET_CODE (x) == SUBREG || GET_CODE (x) == REG
1155 || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR)
1156 return simplify_gen_subreg (mode, x, GET_MODE (x), offset);
1157 else if ((GET_MODE_CLASS (mode) == MODE_VECTOR_INT
1158 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT)
1159 && GET_MODE (x) == VOIDmode)
1160 return simplify_gen_subreg (mode, x, int_mode_for_mode (mode), offset);
1161 /* If X is a CONST_INT or a CONST_DOUBLE, extract the appropriate bits
1162 from the low-order part of the constant. */
1163 else if ((GET_MODE_CLASS (mode) == MODE_INT
1164 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
1165 && GET_MODE (x) == VOIDmode
1166 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
1168 /* If MODE is twice the host word size, X is already the desired
1169 representation. Otherwise, if MODE is wider than a word, we can't
1170 do this. If MODE is exactly a word, return just one CONST_INT. */
1172 if (GET_MODE_BITSIZE (mode) >= 2 * HOST_BITS_PER_WIDE_INT)
1173 return x;
1174 else if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
1175 return 0;
1176 else if (GET_MODE_BITSIZE (mode) == HOST_BITS_PER_WIDE_INT)
1177 return (GET_CODE (x) == CONST_INT ? x
1178 : GEN_INT (CONST_DOUBLE_LOW (x)));
1179 else
1181 /* MODE must be narrower than HOST_BITS_PER_WIDE_INT. */
1182 HOST_WIDE_INT val = (GET_CODE (x) == CONST_INT ? INTVAL (x)
1183 : CONST_DOUBLE_LOW (x));
1185 /* Sign extend to HOST_WIDE_INT. */
1186 val = trunc_int_for_mode (val, mode);
1188 return (GET_CODE (x) == CONST_INT && INTVAL (x) == val ? x
1189 : GEN_INT (val));
1193 /* The floating-point emulator can handle all conversions between
1194 FP and integer operands. This simplifies reload because it
1195 doesn't have to deal with constructs like (subreg:DI
1196 (const_double:SF ...)) or (subreg:DF (const_int ...)). */
1197 /* Single-precision floats are always 32-bits and double-precision
1198 floats are always 64-bits. */
1200 else if (GET_MODE_CLASS (mode) == MODE_FLOAT
1201 && GET_MODE_BITSIZE (mode) == 32
1202 && GET_CODE (x) == CONST_INT)
1204 REAL_VALUE_TYPE r;
1205 long i = INTVAL (x);
1207 real_from_target (&r, &i, mode);
1208 return CONST_DOUBLE_FROM_REAL_VALUE (r, mode);
1210 else if (GET_MODE_CLASS (mode) == MODE_FLOAT
1211 && GET_MODE_BITSIZE (mode) == 64
1212 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE)
1213 && GET_MODE (x) == VOIDmode)
1215 REAL_VALUE_TYPE r;
1216 HOST_WIDE_INT low, high;
1217 long i[2];
1219 if (GET_CODE (x) == CONST_INT)
1221 low = INTVAL (x);
1222 high = low >> (HOST_BITS_PER_WIDE_INT - 1);
1224 else
1226 low = CONST_DOUBLE_LOW (x);
1227 high = CONST_DOUBLE_HIGH (x);
1230 if (HOST_BITS_PER_WIDE_INT > 32)
1231 high = low >> 31 >> 1;
1233 /* REAL_VALUE_TARGET_DOUBLE takes the addressing order of the
1234 target machine. */
1235 if (WORDS_BIG_ENDIAN)
1236 i[0] = high, i[1] = low;
1237 else
1238 i[0] = low, i[1] = high;
1240 real_from_target (&r, i, mode);
1241 return CONST_DOUBLE_FROM_REAL_VALUE (r, mode);
1243 else if ((GET_MODE_CLASS (mode) == MODE_INT
1244 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
1245 && GET_CODE (x) == CONST_DOUBLE
1246 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
1248 REAL_VALUE_TYPE r;
1249 long i[4]; /* Only the low 32 bits of each 'long' are used. */
1250 int endian = WORDS_BIG_ENDIAN ? 1 : 0;
1252 /* Convert 'r' into an array of four 32-bit words in target word
1253 order. */
1254 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
1255 switch (GET_MODE_BITSIZE (GET_MODE (x)))
1257 case 32:
1258 REAL_VALUE_TO_TARGET_SINGLE (r, i[3 * endian]);
1259 i[1] = 0;
1260 i[2] = 0;
1261 i[3 - 3 * endian] = 0;
1262 break;
1263 case 64:
1264 REAL_VALUE_TO_TARGET_DOUBLE (r, i + 2 * endian);
1265 i[2 - 2 * endian] = 0;
1266 i[3 - 2 * endian] = 0;
1267 break;
1268 case 96:
1269 REAL_VALUE_TO_TARGET_LONG_DOUBLE (r, i + endian);
1270 i[3 - 3 * endian] = 0;
1271 break;
1272 case 128:
1273 REAL_VALUE_TO_TARGET_LONG_DOUBLE (r, i);
1274 break;
1275 default:
1276 abort ();
1278 /* Now, pack the 32-bit elements of the array into a CONST_DOUBLE
1279 and return it. */
1280 #if HOST_BITS_PER_WIDE_INT == 32
1281 return immed_double_const (i[3 * endian], i[1 + endian], mode);
1282 #else
1283 if (HOST_BITS_PER_WIDE_INT != 64)
1284 abort ();
1286 return immed_double_const ((((unsigned long) i[3 * endian])
1287 | ((HOST_WIDE_INT) i[1 + endian] << 32)),
1288 (((unsigned long) i[2 - endian])
1289 | ((HOST_WIDE_INT) i[3 - 3 * endian] << 32)),
1290 mode);
1291 #endif
1294 /* Otherwise, we can't do this. */
1295 return 0;
1298 /* Return the real part (which has mode MODE) of a complex value X.
1299 This always comes at the low address in memory. */
1302 gen_realpart (mode, x)
1303 enum machine_mode mode;
1304 rtx x;
1306 if (WORDS_BIG_ENDIAN
1307 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
1308 && REG_P (x)
1309 && REGNO (x) < FIRST_PSEUDO_REGISTER)
1310 internal_error
1311 ("can't access real part of complex value in hard register");
1312 else if (WORDS_BIG_ENDIAN)
1313 return gen_highpart (mode, x);
1314 else
1315 return gen_lowpart (mode, x);
1318 /* Return the imaginary part (which has mode MODE) of a complex value X.
1319 This always comes at the high address in memory. */
1322 gen_imagpart (mode, x)
1323 enum machine_mode mode;
1324 rtx x;
1326 if (WORDS_BIG_ENDIAN)
1327 return gen_lowpart (mode, x);
1328 else if (! WORDS_BIG_ENDIAN
1329 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
1330 && REG_P (x)
1331 && REGNO (x) < FIRST_PSEUDO_REGISTER)
1332 internal_error
1333 ("can't access imaginary part of complex value in hard register");
1334 else
1335 return gen_highpart (mode, x);
1338 /* Return 1 iff X, assumed to be a SUBREG,
1339 refers to the real part of the complex value in its containing reg.
1340 Complex values are always stored with the real part in the first word,
1341 regardless of WORDS_BIG_ENDIAN. */
1344 subreg_realpart_p (x)
1345 rtx x;
1347 if (GET_CODE (x) != SUBREG)
1348 abort ();
1350 return ((unsigned int) SUBREG_BYTE (x)
1351 < GET_MODE_UNIT_SIZE (GET_MODE (SUBREG_REG (x))));
1354 /* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a value,
1355 return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
1356 least-significant part of X.
1357 MODE specifies how big a part of X to return;
1358 it usually should not be larger than a word.
1359 If X is a MEM whose address is a QUEUED, the value may be so also. */
1362 gen_lowpart (mode, x)
1363 enum machine_mode mode;
1364 rtx x;
1366 rtx result = gen_lowpart_common (mode, x);
1368 if (result)
1369 return result;
1370 else if (GET_CODE (x) == REG)
1372 /* Must be a hard reg that's not valid in MODE. */
1373 result = gen_lowpart_common (mode, copy_to_reg (x));
1374 if (result == 0)
1375 abort ();
1376 return result;
1378 else if (GET_CODE (x) == MEM)
1380 /* The only additional case we can do is MEM. */
1381 int offset = 0;
1383 /* The following exposes the use of "x" to CSE. */
1384 if (GET_MODE_SIZE (GET_MODE (x)) <= UNITS_PER_WORD
1385 && SCALAR_INT_MODE_P (GET_MODE (x))
1386 && ! no_new_pseudos)
1387 return gen_lowpart (mode, force_reg (GET_MODE (x), x));
1389 if (WORDS_BIG_ENDIAN)
1390 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
1391 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
1393 if (BYTES_BIG_ENDIAN)
1394 /* Adjust the address so that the address-after-the-data
1395 is unchanged. */
1396 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
1397 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
1399 return adjust_address (x, mode, offset);
1401 else if (GET_CODE (x) == ADDRESSOF)
1402 return gen_lowpart (mode, force_reg (GET_MODE (x), x));
1403 else
1404 abort ();
1407 /* Like `gen_lowpart', but refer to the most significant part.
1408 This is used to access the imaginary part of a complex number. */
1411 gen_highpart (mode, x)
1412 enum machine_mode mode;
1413 rtx x;
1415 unsigned int msize = GET_MODE_SIZE (mode);
1416 rtx result;
1418 /* This case loses if X is a subreg. To catch bugs early,
1419 complain if an invalid MODE is used even in other cases. */
1420 if (msize > UNITS_PER_WORD
1421 && msize != GET_MODE_UNIT_SIZE (GET_MODE (x)))
1422 abort ();
1424 result = simplify_gen_subreg (mode, x, GET_MODE (x),
1425 subreg_highpart_offset (mode, GET_MODE (x)));
1427 /* simplify_gen_subreg is not guaranteed to return a valid operand for
1428 the target if we have a MEM. gen_highpart must return a valid operand,
1429 emitting code if necessary to do so. */
1430 if (result != NULL_RTX && GET_CODE (result) == MEM)
1431 result = validize_mem (result);
1433 if (!result)
1434 abort ();
1435 return result;
1438 /* Like gen_highpart, but accept mode of EXP operand in case EXP can
1439 be VOIDmode constant. */
1441 gen_highpart_mode (outermode, innermode, exp)
1442 enum machine_mode outermode, innermode;
1443 rtx exp;
1445 if (GET_MODE (exp) != VOIDmode)
1447 if (GET_MODE (exp) != innermode)
1448 abort ();
1449 return gen_highpart (outermode, exp);
1451 return simplify_gen_subreg (outermode, exp, innermode,
1452 subreg_highpart_offset (outermode, innermode));
1455 /* Return offset in bytes to get OUTERMODE low part
1456 of the value in mode INNERMODE stored in memory in target format. */
1458 unsigned int
1459 subreg_lowpart_offset (outermode, innermode)
1460 enum machine_mode outermode, innermode;
1462 unsigned int offset = 0;
1463 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1465 if (difference > 0)
1467 if (WORDS_BIG_ENDIAN)
1468 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1469 if (BYTES_BIG_ENDIAN)
1470 offset += difference % UNITS_PER_WORD;
1473 return offset;
1476 /* Return offset in bytes to get OUTERMODE high part
1477 of the value in mode INNERMODE stored in memory in target format. */
1478 unsigned int
1479 subreg_highpart_offset (outermode, innermode)
1480 enum machine_mode outermode, innermode;
1482 unsigned int offset = 0;
1483 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1485 if (GET_MODE_SIZE (innermode) < GET_MODE_SIZE (outermode))
1486 abort ();
1488 if (difference > 0)
1490 if (! WORDS_BIG_ENDIAN)
1491 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1492 if (! BYTES_BIG_ENDIAN)
1493 offset += difference % UNITS_PER_WORD;
1496 return offset;
1499 /* Return 1 iff X, assumed to be a SUBREG,
1500 refers to the least significant part of its containing reg.
1501 If X is not a SUBREG, always return 1 (it is its own low part!). */
1504 subreg_lowpart_p (x)
1505 rtx x;
1507 if (GET_CODE (x) != SUBREG)
1508 return 1;
1509 else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
1510 return 0;
1512 return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
1513 == SUBREG_BYTE (x));
1517 /* Helper routine for all the constant cases of operand_subword.
1518 Some places invoke this directly. */
1521 constant_subword (op, offset, mode)
1522 rtx op;
1523 int offset;
1524 enum machine_mode mode;
1526 int size_ratio = HOST_BITS_PER_WIDE_INT / BITS_PER_WORD;
1527 HOST_WIDE_INT val;
1529 /* If OP is already an integer word, return it. */
1530 if (GET_MODE_CLASS (mode) == MODE_INT
1531 && GET_MODE_SIZE (mode) == UNITS_PER_WORD)
1532 return op;
1534 /* The output is some bits, the width of the target machine's word.
1535 A wider-word host can surely hold them in a CONST_INT. A narrower-word
1536 host can't. */
1537 if (HOST_BITS_PER_WIDE_INT >= BITS_PER_WORD
1538 && GET_MODE_CLASS (mode) == MODE_FLOAT
1539 && GET_MODE_BITSIZE (mode) == 64
1540 && GET_CODE (op) == CONST_DOUBLE)
1542 long k[2];
1543 REAL_VALUE_TYPE rv;
1545 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1546 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
1548 /* We handle 32-bit and >= 64-bit words here. Note that the order in
1549 which the words are written depends on the word endianness.
1550 ??? This is a potential portability problem and should
1551 be fixed at some point.
1553 We must exercise caution with the sign bit. By definition there
1554 are 32 significant bits in K; there may be more in a HOST_WIDE_INT.
1555 Consider a host with a 32-bit long and a 64-bit HOST_WIDE_INT.
1556 So we explicitly mask and sign-extend as necessary. */
1557 if (BITS_PER_WORD == 32)
1559 val = k[offset];
1560 val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;
1561 return GEN_INT (val);
1563 #if HOST_BITS_PER_WIDE_INT >= 64
1564 else if (BITS_PER_WORD >= 64 && offset == 0)
1566 val = k[! WORDS_BIG_ENDIAN];
1567 val = (((val & 0xffffffff) ^ 0x80000000) - 0x80000000) << 32;
1568 val |= (HOST_WIDE_INT) k[WORDS_BIG_ENDIAN] & 0xffffffff;
1569 return GEN_INT (val);
1571 #endif
1572 else if (BITS_PER_WORD == 16)
1574 val = k[offset >> 1];
1575 if ((offset & 1) == ! WORDS_BIG_ENDIAN)
1576 val >>= 16;
1577 val = ((val & 0xffff) ^ 0x8000) - 0x8000;
1578 return GEN_INT (val);
1580 else
1581 abort ();
1583 else if (HOST_BITS_PER_WIDE_INT >= BITS_PER_WORD
1584 && GET_MODE_CLASS (mode) == MODE_FLOAT
1585 && GET_MODE_BITSIZE (mode) > 64
1586 && GET_CODE (op) == CONST_DOUBLE)
1588 long k[4];
1589 REAL_VALUE_TYPE rv;
1591 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1592 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
1594 if (BITS_PER_WORD == 32)
1596 val = k[offset];
1597 val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;
1598 return GEN_INT (val);
1600 #if HOST_BITS_PER_WIDE_INT >= 64
1601 else if (BITS_PER_WORD >= 64 && offset <= 1)
1603 val = k[offset * 2 + ! WORDS_BIG_ENDIAN];
1604 val = (((val & 0xffffffff) ^ 0x80000000) - 0x80000000) << 32;
1605 val |= (HOST_WIDE_INT) k[offset * 2 + WORDS_BIG_ENDIAN] & 0xffffffff;
1606 return GEN_INT (val);
1608 #endif
1609 else
1610 abort ();
1613 /* Single word float is a little harder, since single- and double-word
1614 values often do not have the same high-order bits. We have already
1615 verified that we want the only defined word of the single-word value. */
1616 if (GET_MODE_CLASS (mode) == MODE_FLOAT
1617 && GET_MODE_BITSIZE (mode) == 32
1618 && GET_CODE (op) == CONST_DOUBLE)
1620 long l;
1621 REAL_VALUE_TYPE rv;
1623 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1624 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1626 /* Sign extend from known 32-bit value to HOST_WIDE_INT. */
1627 val = l;
1628 val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;
1630 if (BITS_PER_WORD == 16)
1632 if ((offset & 1) == ! WORDS_BIG_ENDIAN)
1633 val >>= 16;
1634 val = ((val & 0xffff) ^ 0x8000) - 0x8000;
1637 return GEN_INT (val);
1640 /* The only remaining cases that we can handle are integers.
1641 Convert to proper endianness now since these cases need it.
1642 At this point, offset == 0 means the low-order word.
1644 We do not want to handle the case when BITS_PER_WORD <= HOST_BITS_PER_INT
1645 in general. However, if OP is (const_int 0), we can just return
1646 it for any word. */
1648 if (op == const0_rtx)
1649 return op;
1651 if (GET_MODE_CLASS (mode) != MODE_INT
1652 || (GET_CODE (op) != CONST_INT && GET_CODE (op) != CONST_DOUBLE)
1653 || BITS_PER_WORD > HOST_BITS_PER_WIDE_INT)
1654 return 0;
1656 if (WORDS_BIG_ENDIAN)
1657 offset = GET_MODE_SIZE (mode) / UNITS_PER_WORD - 1 - offset;
1659 /* Find out which word on the host machine this value is in and get
1660 it from the constant. */
1661 val = (offset / size_ratio == 0
1662 ? (GET_CODE (op) == CONST_INT ? INTVAL (op) : CONST_DOUBLE_LOW (op))
1663 : (GET_CODE (op) == CONST_INT
1664 ? (INTVAL (op) < 0 ? ~0 : 0) : CONST_DOUBLE_HIGH (op)));
1666 /* Get the value we want into the low bits of val. */
1667 if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT)
1668 val = ((val >> ((offset % size_ratio) * BITS_PER_WORD)));
1670 val = trunc_int_for_mode (val, word_mode);
1672 return GEN_INT (val);
1675 /* Return subword OFFSET of operand OP.
1676 The word number, OFFSET, is interpreted as the word number starting
1677 at the low-order address. OFFSET 0 is the low-order word if not
1678 WORDS_BIG_ENDIAN, otherwise it is the high-order word.
1680 If we cannot extract the required word, we return zero. Otherwise,
1681 an rtx corresponding to the requested word will be returned.
1683 VALIDATE_ADDRESS is nonzero if the address should be validated. Before
1684 reload has completed, a valid address will always be returned. After
1685 reload, if a valid address cannot be returned, we return zero.
1687 If VALIDATE_ADDRESS is zero, we simply form the required address; validating
1688 it is the responsibility of the caller.
1690 MODE is the mode of OP in case it is a CONST_INT.
1692 ??? This is still rather broken for some cases. The problem for the
1693 moment is that all callers of this thing provide no 'goal mode' to
1694 tell us to work with. This exists because all callers were written
1695 in a word based SUBREG world.
1696 Now use of this function can be deprecated by simplify_subreg in most
1697 cases.
1701 operand_subword (op, offset, validate_address, mode)
1702 rtx op;
1703 unsigned int offset;
1704 int validate_address;
1705 enum machine_mode mode;
1707 if (mode == VOIDmode)
1708 mode = GET_MODE (op);
1710 if (mode == VOIDmode)
1711 abort ();
1713 /* If OP is narrower than a word, fail. */
1714 if (mode != BLKmode
1715 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
1716 return 0;
1718 /* If we want a word outside OP, return zero. */
1719 if (mode != BLKmode
1720 && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
1721 return const0_rtx;
1723 /* Form a new MEM at the requested address. */
1724 if (GET_CODE (op) == MEM)
1726 rtx new = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);
1728 if (! validate_address)
1729 return new;
1731 else if (reload_completed)
1733 if (! strict_memory_address_p (word_mode, XEXP (new, 0)))
1734 return 0;
1736 else
1737 return replace_equiv_address (new, XEXP (new, 0));
1740 /* Rest can be handled by simplify_subreg. */
1741 return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
1744 /* Similar to `operand_subword', but never return 0. If we can't extract
1745 the required subword, put OP into a register and try again. If that fails,
1746 abort. We always validate the address in this case.
1748 MODE is the mode of OP, in case it is CONST_INT. */
1751 operand_subword_force (op, offset, mode)
1752 rtx op;
1753 unsigned int offset;
1754 enum machine_mode mode;
1756 rtx result = operand_subword (op, offset, 1, mode);
1758 if (result)
1759 return result;
1761 if (mode != BLKmode && mode != VOIDmode)
1763 /* If this is a register which can not be accessed by words, copy it
1764 to a pseudo register. */
1765 if (GET_CODE (op) == REG)
1766 op = copy_to_reg (op);
1767 else
1768 op = force_reg (mode, op);
1771 result = operand_subword (op, offset, 1, mode);
1772 if (result == 0)
1773 abort ();
1775 return result;
1778 /* Given a compare instruction, swap the operands.
1779 A test instruction is changed into a compare of 0 against the operand. */
1781 void
1782 reverse_comparison (insn)
1783 rtx insn;
1785 rtx body = PATTERN (insn);
1786 rtx comp;
1788 if (GET_CODE (body) == SET)
1789 comp = SET_SRC (body);
1790 else
1791 comp = SET_SRC (XVECEXP (body, 0, 0));
1793 if (GET_CODE (comp) == COMPARE)
1795 rtx op0 = XEXP (comp, 0);
1796 rtx op1 = XEXP (comp, 1);
1797 XEXP (comp, 0) = op1;
1798 XEXP (comp, 1) = op0;
1800 else
1802 rtx new = gen_rtx_COMPARE (VOIDmode,
1803 CONST0_RTX (GET_MODE (comp)), comp);
1804 if (GET_CODE (body) == SET)
1805 SET_SRC (body) = new;
1806 else
1807 SET_SRC (XVECEXP (body, 0, 0)) = new;
1811 /* Within a MEM_EXPR, we care about either (1) a component ref of a decl,
1812 or (2) a component ref of something variable. Represent the later with
1813 a NULL expression. */
1815 static tree
1816 component_ref_for_mem_expr (ref)
1817 tree ref;
1819 tree inner = TREE_OPERAND (ref, 0);
1821 if (TREE_CODE (inner) == COMPONENT_REF)
1822 inner = component_ref_for_mem_expr (inner);
1823 else
1825 tree placeholder_ptr = 0;
1827 /* Now remove any conversions: they don't change what the underlying
1828 object is. Likewise for SAVE_EXPR. Also handle PLACEHOLDER_EXPR. */
1829 while (TREE_CODE (inner) == NOP_EXPR || TREE_CODE (inner) == CONVERT_EXPR
1830 || TREE_CODE (inner) == NON_LVALUE_EXPR
1831 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1832 || TREE_CODE (inner) == SAVE_EXPR
1833 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
1834 if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
1835 inner = find_placeholder (inner, &placeholder_ptr);
1836 else
1837 inner = TREE_OPERAND (inner, 0);
1839 if (! DECL_P (inner))
1840 inner = NULL_TREE;
1843 if (inner == TREE_OPERAND (ref, 0))
1844 return ref;
1845 else
1846 return build (COMPONENT_REF, TREE_TYPE (ref), inner,
1847 TREE_OPERAND (ref, 1));
1850 /* Given REF, a MEM, and T, either the type of X or the expression
1851 corresponding to REF, set the memory attributes. OBJECTP is nonzero
1852 if we are making a new object of this type. BITPOS is nonzero if
1853 there is an offset outstanding on T that will be applied later. */
1855 void
1856 set_mem_attributes_minus_bitpos (ref, t, objectp, bitpos)
1857 rtx ref;
1858 tree t;
1859 int objectp;
1860 HOST_WIDE_INT bitpos;
1862 HOST_WIDE_INT alias = MEM_ALIAS_SET (ref);
1863 tree expr = MEM_EXPR (ref);
1864 rtx offset = MEM_OFFSET (ref);
1865 rtx size = MEM_SIZE (ref);
1866 unsigned int align = MEM_ALIGN (ref);
1867 HOST_WIDE_INT apply_bitpos = 0;
1868 tree type;
1870 /* It can happen that type_for_mode was given a mode for which there
1871 is no language-level type. In which case it returns NULL, which
1872 we can see here. */
1873 if (t == NULL_TREE)
1874 return;
1876 type = TYPE_P (t) ? t : TREE_TYPE (t);
1878 /* If we have already set DECL_RTL = ref, get_alias_set will get the
1879 wrong answer, as it assumes that DECL_RTL already has the right alias
1880 info. Callers should not set DECL_RTL until after the call to
1881 set_mem_attributes. */
1882 if (DECL_P (t) && ref == DECL_RTL_IF_SET (t))
1883 abort ();
1885 /* Get the alias set from the expression or type (perhaps using a
1886 front-end routine) and use it. */
1887 alias = get_alias_set (t);
1889 MEM_VOLATILE_P (ref) = TYPE_VOLATILE (type);
1890 MEM_IN_STRUCT_P (ref) = AGGREGATE_TYPE_P (type);
1891 RTX_UNCHANGING_P (ref)
1892 |= ((lang_hooks.honor_readonly
1893 && (TYPE_READONLY (type) || TREE_READONLY (t)))
1894 || (! TYPE_P (t) && TREE_CONSTANT (t)));
1896 /* If we are making an object of this type, or if this is a DECL, we know
1897 that it is a scalar if the type is not an aggregate. */
1898 if ((objectp || DECL_P (t)) && ! AGGREGATE_TYPE_P (type))
1899 MEM_SCALAR_P (ref) = 1;
1901 /* We can set the alignment from the type if we are making an object,
1902 this is an INDIRECT_REF, or if TYPE_ALIGN_OK. */
1903 if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
1904 align = MAX (align, TYPE_ALIGN (type));
1906 /* If the size is known, we can set that. */
1907 if (TYPE_SIZE_UNIT (type) && host_integerp (TYPE_SIZE_UNIT (type), 1))
1908 size = GEN_INT (tree_low_cst (TYPE_SIZE_UNIT (type), 1));
1910 /* If T is not a type, we may be able to deduce some more information about
1911 the expression. */
1912 if (! TYPE_P (t))
1914 maybe_set_unchanging (ref, t);
1915 if (TREE_THIS_VOLATILE (t))
1916 MEM_VOLATILE_P (ref) = 1;
1918 /* Now remove any conversions: they don't change what the underlying
1919 object is. Likewise for SAVE_EXPR. */
1920 while (TREE_CODE (t) == NOP_EXPR || TREE_CODE (t) == CONVERT_EXPR
1921 || TREE_CODE (t) == NON_LVALUE_EXPR
1922 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1923 || TREE_CODE (t) == SAVE_EXPR)
1924 t = TREE_OPERAND (t, 0);
1926 /* If this expression can't be addressed (e.g., it contains a reference
1927 to a non-addressable field), show we don't change its alias set. */
1928 if (! can_address_p (t))
1929 MEM_KEEP_ALIAS_SET_P (ref) = 1;
1931 /* If this is a decl, set the attributes of the MEM from it. */
1932 if (DECL_P (t))
1934 expr = t;
1935 offset = const0_rtx;
1936 apply_bitpos = bitpos;
1937 size = (DECL_SIZE_UNIT (t)
1938 && host_integerp (DECL_SIZE_UNIT (t), 1)
1939 ? GEN_INT (tree_low_cst (DECL_SIZE_UNIT (t), 1)) : 0);
1940 align = DECL_ALIGN (t);
1943 /* If this is a constant, we know the alignment. */
1944 else if (TREE_CODE_CLASS (TREE_CODE (t)) == 'c')
1946 align = TYPE_ALIGN (type);
1947 #ifdef CONSTANT_ALIGNMENT
1948 align = CONSTANT_ALIGNMENT (t, align);
1949 #endif
1952 /* If this is a field reference and not a bit-field, record it. */
1953 /* ??? There is some information that can be gleened from bit-fields,
1954 such as the word offset in the structure that might be modified.
1955 But skip it for now. */
1956 else if (TREE_CODE (t) == COMPONENT_REF
1957 && ! DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
1959 expr = component_ref_for_mem_expr (t);
1960 offset = const0_rtx;
1961 apply_bitpos = bitpos;
1962 /* ??? Any reason the field size would be different than
1963 the size we got from the type? */
1966 /* If this is an array reference, look for an outer field reference. */
1967 else if (TREE_CODE (t) == ARRAY_REF)
1969 tree off_tree = size_zero_node;
1973 tree index = TREE_OPERAND (t, 1);
1974 tree array = TREE_OPERAND (t, 0);
1975 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
1976 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
1977 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
1979 /* We assume all arrays have sizes that are a multiple of a byte.
1980 First subtract the lower bound, if any, in the type of the
1981 index, then convert to sizetype and multiply by the size of the
1982 array element. */
1983 if (low_bound != 0 && ! integer_zerop (low_bound))
1984 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
1985 index, low_bound));
1987 /* If the index has a self-referential type, pass it to a
1988 WITH_RECORD_EXPR; if the component size is, pass our
1989 component to one. */
1990 if (! TREE_CONSTANT (index)
1991 && contains_placeholder_p (index))
1992 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, t);
1993 if (! TREE_CONSTANT (unit_size)
1994 && contains_placeholder_p (unit_size))
1995 unit_size = build (WITH_RECORD_EXPR, sizetype,
1996 unit_size, array);
1998 off_tree
1999 = fold (build (PLUS_EXPR, sizetype,
2000 fold (build (MULT_EXPR, sizetype,
2001 index,
2002 unit_size)),
2003 off_tree));
2004 t = TREE_OPERAND (t, 0);
2006 while (TREE_CODE (t) == ARRAY_REF);
2008 if (DECL_P (t))
2010 expr = t;
2011 offset = NULL;
2012 if (host_integerp (off_tree, 1))
2014 HOST_WIDE_INT ioff = tree_low_cst (off_tree, 1);
2015 HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
2016 align = DECL_ALIGN (t);
2017 if (aoff && (unsigned HOST_WIDE_INT) aoff < align)
2018 align = aoff;
2019 offset = GEN_INT (ioff);
2020 apply_bitpos = bitpos;
2023 else if (TREE_CODE (t) == COMPONENT_REF)
2025 expr = component_ref_for_mem_expr (t);
2026 if (host_integerp (off_tree, 1))
2028 offset = GEN_INT (tree_low_cst (off_tree, 1));
2029 apply_bitpos = bitpos;
2031 /* ??? Any reason the field size would be different than
2032 the size we got from the type? */
2034 else if (flag_argument_noalias > 1
2035 && TREE_CODE (t) == INDIRECT_REF
2036 && TREE_CODE (TREE_OPERAND (t, 0)) == PARM_DECL)
2038 expr = t;
2039 offset = NULL;
2043 /* If this is a Fortran indirect argument reference, record the
2044 parameter decl. */
2045 else if (flag_argument_noalias > 1
2046 && TREE_CODE (t) == INDIRECT_REF
2047 && TREE_CODE (TREE_OPERAND (t, 0)) == PARM_DECL)
2049 expr = t;
2050 offset = NULL;
2054 /* If we modified OFFSET based on T, then subtract the outstanding
2055 bit position offset. Similarly, increase the size of the accessed
2056 object to contain the negative offset. */
2057 if (apply_bitpos)
2059 offset = plus_constant (offset, -(apply_bitpos / BITS_PER_UNIT));
2060 if (size)
2061 size = plus_constant (size, apply_bitpos / BITS_PER_UNIT);
2064 /* Now set the attributes we computed above. */
2065 MEM_ATTRS (ref)
2066 = get_mem_attrs (alias, expr, offset, size, align, GET_MODE (ref));
2068 /* If this is already known to be a scalar or aggregate, we are done. */
2069 if (MEM_IN_STRUCT_P (ref) || MEM_SCALAR_P (ref))
2070 return;
2072 /* If it is a reference into an aggregate, this is part of an aggregate.
2073 Otherwise we don't know. */
2074 else if (TREE_CODE (t) == COMPONENT_REF || TREE_CODE (t) == ARRAY_REF
2075 || TREE_CODE (t) == ARRAY_RANGE_REF
2076 || TREE_CODE (t) == BIT_FIELD_REF)
2077 MEM_IN_STRUCT_P (ref) = 1;
2080 void
2081 set_mem_attributes (ref, t, objectp)
2082 rtx ref;
2083 tree t;
2084 int objectp;
2086 set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
2089 /* Set the decl for MEM to DECL. */
2091 void
2092 set_mem_attrs_from_reg (mem, reg)
2093 rtx mem;
2094 rtx reg;
2096 MEM_ATTRS (mem)
2097 = get_mem_attrs (MEM_ALIAS_SET (mem), REG_EXPR (reg),
2098 GEN_INT (REG_OFFSET (reg)),
2099 MEM_SIZE (mem), MEM_ALIGN (mem), GET_MODE (mem));
2102 /* Set the alias set of MEM to SET. */
2104 void
2105 set_mem_alias_set (mem, set)
2106 rtx mem;
2107 HOST_WIDE_INT set;
2109 #ifdef ENABLE_CHECKING
2110 /* If the new and old alias sets don't conflict, something is wrong. */
2111 if (!alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)))
2112 abort ();
2113 #endif
2115 MEM_ATTRS (mem) = get_mem_attrs (set, MEM_EXPR (mem), MEM_OFFSET (mem),
2116 MEM_SIZE (mem), MEM_ALIGN (mem),
2117 GET_MODE (mem));
2120 /* Set the alignment of MEM to ALIGN bits. */
2122 void
2123 set_mem_align (mem, align)
2124 rtx mem;
2125 unsigned int align;
2127 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
2128 MEM_OFFSET (mem), MEM_SIZE (mem), align,
2129 GET_MODE (mem));
2132 /* Set the expr for MEM to EXPR. */
2134 void
2135 set_mem_expr (mem, expr)
2136 rtx mem;
2137 tree expr;
2139 MEM_ATTRS (mem)
2140 = get_mem_attrs (MEM_ALIAS_SET (mem), expr, MEM_OFFSET (mem),
2141 MEM_SIZE (mem), MEM_ALIGN (mem), GET_MODE (mem));
2144 /* Set the offset of MEM to OFFSET. */
2146 void
2147 set_mem_offset (mem, offset)
2148 rtx mem, offset;
2150 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
2151 offset, MEM_SIZE (mem), MEM_ALIGN (mem),
2152 GET_MODE (mem));
2155 /* Set the size of MEM to SIZE. */
2157 void
2158 set_mem_size (mem, size)
2159 rtx mem, size;
2161 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
2162 MEM_OFFSET (mem), size, MEM_ALIGN (mem),
2163 GET_MODE (mem));
2166 /* Return a memory reference like MEMREF, but with its mode changed to MODE
2167 and its address changed to ADDR. (VOIDmode means don't change the mode.
2168 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
2169 returned memory location is required to be valid. The memory
2170 attributes are not changed. */
2172 static rtx
2173 change_address_1 (memref, mode, addr, validate)
2174 rtx memref;
2175 enum machine_mode mode;
2176 rtx addr;
2177 int validate;
2179 rtx new;
2181 if (GET_CODE (memref) != MEM)
2182 abort ();
2183 if (mode == VOIDmode)
2184 mode = GET_MODE (memref);
2185 if (addr == 0)
2186 addr = XEXP (memref, 0);
2188 if (validate)
2190 if (reload_in_progress || reload_completed)
2192 if (! memory_address_p (mode, addr))
2193 abort ();
2195 else
2196 addr = memory_address (mode, addr);
2199 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
2200 return memref;
2202 new = gen_rtx_MEM (mode, addr);
2203 MEM_COPY_ATTRIBUTES (new, memref);
2204 return new;
2207 /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
2208 way we are changing MEMREF, so we only preserve the alias set. */
2211 change_address (memref, mode, addr)
2212 rtx memref;
2213 enum machine_mode mode;
2214 rtx addr;
2216 rtx new = change_address_1 (memref, mode, addr, 1);
2217 enum machine_mode mmode = GET_MODE (new);
2219 MEM_ATTRS (new)
2220 = get_mem_attrs (MEM_ALIAS_SET (memref), 0, 0,
2221 mmode == BLKmode ? 0 : GEN_INT (GET_MODE_SIZE (mmode)),
2222 (mmode == BLKmode ? BITS_PER_UNIT
2223 : GET_MODE_ALIGNMENT (mmode)),
2224 mmode);
2226 return new;
2229 /* Return a memory reference like MEMREF, but with its mode changed
2230 to MODE and its address offset by OFFSET bytes. If VALIDATE is
2231 nonzero, the memory address is forced to be valid.
2232 If ADJUST is zero, OFFSET is only used to update MEM_ATTRS
2233 and caller is responsible for adjusting MEMREF base register. */
2236 adjust_address_1 (memref, mode, offset, validate, adjust)
2237 rtx memref;
2238 enum machine_mode mode;
2239 HOST_WIDE_INT offset;
2240 int validate, adjust;
2242 rtx addr = XEXP (memref, 0);
2243 rtx new;
2244 rtx memoffset = MEM_OFFSET (memref);
2245 rtx size = 0;
2246 unsigned int memalign = MEM_ALIGN (memref);
2248 /* ??? Prefer to create garbage instead of creating shared rtl.
2249 This may happen even if offset is nonzero -- consider
2250 (plus (plus reg reg) const_int) -- so do this always. */
2251 addr = copy_rtx (addr);
2253 if (adjust)
2255 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
2256 object, we can merge it into the LO_SUM. */
2257 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
2258 && offset >= 0
2259 && (unsigned HOST_WIDE_INT) offset
2260 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
2261 addr = gen_rtx_LO_SUM (Pmode, XEXP (addr, 0),
2262 plus_constant (XEXP (addr, 1), offset));
2263 else
2264 addr = plus_constant (addr, offset);
2267 new = change_address_1 (memref, mode, addr, validate);
2269 /* Compute the new values of the memory attributes due to this adjustment.
2270 We add the offsets and update the alignment. */
2271 if (memoffset)
2272 memoffset = GEN_INT (offset + INTVAL (memoffset));
2274 /* Compute the new alignment by taking the MIN of the alignment and the
2275 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
2276 if zero. */
2277 if (offset != 0)
2278 memalign
2279 = MIN (memalign,
2280 (unsigned HOST_WIDE_INT) (offset & -offset) * BITS_PER_UNIT);
2282 /* We can compute the size in a number of ways. */
2283 if (GET_MODE (new) != BLKmode)
2284 size = GEN_INT (GET_MODE_SIZE (GET_MODE (new)));
2285 else if (MEM_SIZE (memref))
2286 size = plus_constant (MEM_SIZE (memref), -offset);
2288 MEM_ATTRS (new) = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref),
2289 memoffset, size, memalign, GET_MODE (new));
2291 /* At some point, we should validate that this offset is within the object,
2292 if all the appropriate values are known. */
2293 return new;
2296 /* Return a memory reference like MEMREF, but with its mode changed
2297 to MODE and its address changed to ADDR, which is assumed to be
2298 MEMREF offseted by OFFSET bytes. If VALIDATE is
2299 nonzero, the memory address is forced to be valid. */
2302 adjust_automodify_address_1 (memref, mode, addr, offset, validate)
2303 rtx memref;
2304 enum machine_mode mode;
2305 rtx addr;
2306 HOST_WIDE_INT offset;
2307 int validate;
2309 memref = change_address_1 (memref, VOIDmode, addr, validate);
2310 return adjust_address_1 (memref, mode, offset, validate, 0);
2313 /* Return a memory reference like MEMREF, but whose address is changed by
2314 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2315 known to be in OFFSET (possibly 1). */
2318 offset_address (memref, offset, pow2)
2319 rtx memref;
2320 rtx offset;
2321 unsigned HOST_WIDE_INT pow2;
2323 rtx new, addr = XEXP (memref, 0);
2325 new = simplify_gen_binary (PLUS, Pmode, addr, offset);
2327 /* At this point we don't know _why_ the address is invalid. It
2328 could have secondary memory refereces, multiplies or anything.
2330 However, if we did go and rearrange things, we can wind up not
2331 being able to recognize the magic around pic_offset_table_rtx.
2332 This stuff is fragile, and is yet another example of why it is
2333 bad to expose PIC machinery too early. */
2334 if (! memory_address_p (GET_MODE (memref), new)
2335 && GET_CODE (addr) == PLUS
2336 && XEXP (addr, 0) == pic_offset_table_rtx)
2338 addr = force_reg (GET_MODE (addr), addr);
2339 new = simplify_gen_binary (PLUS, Pmode, addr, offset);
2342 update_temp_slot_address (XEXP (memref, 0), new);
2343 new = change_address_1 (memref, VOIDmode, new, 1);
2345 /* Update the alignment to reflect the offset. Reset the offset, which
2346 we don't know. */
2347 MEM_ATTRS (new)
2348 = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref), 0, 0,
2349 MIN (MEM_ALIGN (memref), pow2 * BITS_PER_UNIT),
2350 GET_MODE (new));
2351 return new;
2354 /* Return a memory reference like MEMREF, but with its address changed to
2355 ADDR. The caller is asserting that the actual piece of memory pointed
2356 to is the same, just the form of the address is being changed, such as
2357 by putting something into a register. */
2360 replace_equiv_address (memref, addr)
2361 rtx memref;
2362 rtx addr;
2364 /* change_address_1 copies the memory attribute structure without change
2365 and that's exactly what we want here. */
2366 update_temp_slot_address (XEXP (memref, 0), addr);
2367 return change_address_1 (memref, VOIDmode, addr, 1);
2370 /* Likewise, but the reference is not required to be valid. */
2373 replace_equiv_address_nv (memref, addr)
2374 rtx memref;
2375 rtx addr;
2377 return change_address_1 (memref, VOIDmode, addr, 0);
2380 /* Return a memory reference like MEMREF, but with its mode widened to
2381 MODE and offset by OFFSET. This would be used by targets that e.g.
2382 cannot issue QImode memory operations and have to use SImode memory
2383 operations plus masking logic. */
2386 widen_memory_access (memref, mode, offset)
2387 rtx memref;
2388 enum machine_mode mode;
2389 HOST_WIDE_INT offset;
2391 rtx new = adjust_address_1 (memref, mode, offset, 1, 1);
2392 tree expr = MEM_EXPR (new);
2393 rtx memoffset = MEM_OFFSET (new);
2394 unsigned int size = GET_MODE_SIZE (mode);
2396 /* If we don't know what offset we were at within the expression, then
2397 we can't know if we've overstepped the bounds. */
2398 if (! memoffset)
2399 expr = NULL_TREE;
2401 while (expr)
2403 if (TREE_CODE (expr) == COMPONENT_REF)
2405 tree field = TREE_OPERAND (expr, 1);
2407 if (! DECL_SIZE_UNIT (field))
2409 expr = NULL_TREE;
2410 break;
2413 /* Is the field at least as large as the access? If so, ok,
2414 otherwise strip back to the containing structure. */
2415 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2416 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
2417 && INTVAL (memoffset) >= 0)
2418 break;
2420 if (! host_integerp (DECL_FIELD_OFFSET (field), 1))
2422 expr = NULL_TREE;
2423 break;
2426 expr = TREE_OPERAND (expr, 0);
2427 memoffset = (GEN_INT (INTVAL (memoffset)
2428 + tree_low_cst (DECL_FIELD_OFFSET (field), 1)
2429 + (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
2430 / BITS_PER_UNIT)));
2432 /* Similarly for the decl. */
2433 else if (DECL_P (expr)
2434 && DECL_SIZE_UNIT (expr)
2435 && TREE_CODE (DECL_SIZE_UNIT (expr)) == INTEGER_CST
2436 && compare_tree_int (DECL_SIZE_UNIT (expr), size) >= 0
2437 && (! memoffset || INTVAL (memoffset) >= 0))
2438 break;
2439 else
2441 /* The widened memory access overflows the expression, which means
2442 that it could alias another expression. Zap it. */
2443 expr = NULL_TREE;
2444 break;
2448 if (! expr)
2449 memoffset = NULL_RTX;
2451 /* The widened memory may alias other stuff, so zap the alias set. */
2452 /* ??? Maybe use get_alias_set on any remaining expression. */
2454 MEM_ATTRS (new) = get_mem_attrs (0, expr, memoffset, GEN_INT (size),
2455 MEM_ALIGN (new), mode);
2457 return new;
2460 /* Return a newly created CODE_LABEL rtx with a unique label number. */
2463 gen_label_rtx ()
2465 return gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX, NULL_RTX,
2466 NULL, label_num++, NULL);
2469 /* For procedure integration. */
2471 /* Install new pointers to the first and last insns in the chain.
2472 Also, set cur_insn_uid to one higher than the last in use.
2473 Used for an inline-procedure after copying the insn chain. */
2475 void
2476 set_new_first_and_last_insn (first, last)
2477 rtx first, last;
2479 rtx insn;
2481 first_insn = first;
2482 last_insn = last;
2483 cur_insn_uid = 0;
2485 for (insn = first; insn; insn = NEXT_INSN (insn))
2486 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2488 cur_insn_uid++;
2491 /* Set the range of label numbers found in the current function.
2492 This is used when belatedly compiling an inline function. */
2494 void
2495 set_new_first_and_last_label_num (first, last)
2496 int first, last;
2498 base_label_num = label_num;
2499 first_label_num = first;
2500 last_label_num = last;
2503 /* Set the last label number found in the current function.
2504 This is used when belatedly compiling an inline function. */
2506 void
2507 set_new_last_label_num (last)
2508 int last;
2510 base_label_num = label_num;
2511 last_label_num = last;
2514 /* Restore all variables describing the current status from the structure *P.
2515 This is used after a nested function. */
2517 void
2518 restore_emit_status (p)
2519 struct function *p ATTRIBUTE_UNUSED;
2521 last_label_num = 0;
2524 /* Go through all the RTL insn bodies and copy any invalid shared
2525 structure. This routine should only be called once. */
2527 void
2528 unshare_all_rtl (fndecl, insn)
2529 tree fndecl;
2530 rtx insn;
2532 tree decl;
2534 /* Make sure that virtual parameters are not shared. */
2535 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
2536 SET_DECL_RTL (decl, copy_rtx_if_shared (DECL_RTL (decl)));
2538 /* Make sure that virtual stack slots are not shared. */
2539 unshare_all_decls (DECL_INITIAL (fndecl));
2541 /* Unshare just about everything else. */
2542 unshare_all_rtl_1 (insn);
2544 /* Make sure the addresses of stack slots found outside the insn chain
2545 (such as, in DECL_RTL of a variable) are not shared
2546 with the insn chain.
2548 This special care is necessary when the stack slot MEM does not
2549 actually appear in the insn chain. If it does appear, its address
2550 is unshared from all else at that point. */
2551 stack_slot_list = copy_rtx_if_shared (stack_slot_list);
2554 /* Go through all the RTL insn bodies and copy any invalid shared
2555 structure, again. This is a fairly expensive thing to do so it
2556 should be done sparingly. */
2558 void
2559 unshare_all_rtl_again (insn)
2560 rtx insn;
2562 rtx p;
2563 tree decl;
2565 for (p = insn; p; p = NEXT_INSN (p))
2566 if (INSN_P (p))
2568 reset_used_flags (PATTERN (p));
2569 reset_used_flags (REG_NOTES (p));
2570 reset_used_flags (LOG_LINKS (p));
2573 /* Make sure that virtual stack slots are not shared. */
2574 reset_used_decls (DECL_INITIAL (cfun->decl));
2576 /* Make sure that virtual parameters are not shared. */
2577 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = TREE_CHAIN (decl))
2578 reset_used_flags (DECL_RTL (decl));
2580 reset_used_flags (stack_slot_list);
2582 unshare_all_rtl (cfun->decl, insn);
2585 /* Go through all the RTL insn bodies and copy any invalid shared structure.
2586 Assumes the mark bits are cleared at entry. */
2588 static void
2589 unshare_all_rtl_1 (insn)
2590 rtx insn;
2592 for (; insn; insn = NEXT_INSN (insn))
2593 if (INSN_P (insn))
2595 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2596 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2597 LOG_LINKS (insn) = copy_rtx_if_shared (LOG_LINKS (insn));
2601 /* Go through all virtual stack slots of a function and copy any
2602 shared structure. */
2603 static void
2604 unshare_all_decls (blk)
2605 tree blk;
2607 tree t;
2609 /* Copy shared decls. */
2610 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
2611 if (DECL_RTL_SET_P (t))
2612 SET_DECL_RTL (t, copy_rtx_if_shared (DECL_RTL (t)));
2614 /* Now process sub-blocks. */
2615 for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
2616 unshare_all_decls (t);
2619 /* Go through all virtual stack slots of a function and mark them as
2620 not shared. */
2621 static void
2622 reset_used_decls (blk)
2623 tree blk;
2625 tree t;
2627 /* Mark decls. */
2628 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
2629 if (DECL_RTL_SET_P (t))
2630 reset_used_flags (DECL_RTL (t));
2632 /* Now process sub-blocks. */
2633 for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
2634 reset_used_decls (t);
2637 /* Similar to `copy_rtx' except that if MAY_SHARE is present, it is
2638 placed in the result directly, rather than being copied. MAY_SHARE is
2639 either a MEM of an EXPR_LIST of MEMs. */
2642 copy_most_rtx (orig, may_share)
2643 rtx orig;
2644 rtx may_share;
2646 rtx copy;
2647 int i, j;
2648 RTX_CODE code;
2649 const char *format_ptr;
2651 if (orig == may_share
2652 || (GET_CODE (may_share) == EXPR_LIST
2653 && in_expr_list_p (may_share, orig)))
2654 return orig;
2656 code = GET_CODE (orig);
2658 switch (code)
2660 case REG:
2661 case QUEUED:
2662 case CONST_INT:
2663 case CONST_DOUBLE:
2664 case CONST_VECTOR:
2665 case SYMBOL_REF:
2666 case CODE_LABEL:
2667 case PC:
2668 case CC0:
2669 return orig;
2670 default:
2671 break;
2674 copy = rtx_alloc (code);
2675 PUT_MODE (copy, GET_MODE (orig));
2676 RTX_FLAG (copy, in_struct) = RTX_FLAG (orig, in_struct);
2677 RTX_FLAG (copy, volatil) = RTX_FLAG (orig, volatil);
2678 RTX_FLAG (copy, unchanging) = RTX_FLAG (orig, unchanging);
2679 RTX_FLAG (copy, integrated) = RTX_FLAG (orig, integrated);
2680 RTX_FLAG (copy, frame_related) = RTX_FLAG (orig, frame_related);
2682 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
2684 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
2686 switch (*format_ptr++)
2688 case 'e':
2689 XEXP (copy, i) = XEXP (orig, i);
2690 if (XEXP (orig, i) != NULL && XEXP (orig, i) != may_share)
2691 XEXP (copy, i) = copy_most_rtx (XEXP (orig, i), may_share);
2692 break;
2694 case 'u':
2695 XEXP (copy, i) = XEXP (orig, i);
2696 break;
2698 case 'E':
2699 case 'V':
2700 XVEC (copy, i) = XVEC (orig, i);
2701 if (XVEC (orig, i) != NULL)
2703 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
2704 for (j = 0; j < XVECLEN (copy, i); j++)
2705 XVECEXP (copy, i, j)
2706 = copy_most_rtx (XVECEXP (orig, i, j), may_share);
2708 break;
2710 case 'w':
2711 XWINT (copy, i) = XWINT (orig, i);
2712 break;
2714 case 'n':
2715 case 'i':
2716 XINT (copy, i) = XINT (orig, i);
2717 break;
2719 case 't':
2720 XTREE (copy, i) = XTREE (orig, i);
2721 break;
2723 case 's':
2724 case 'S':
2725 XSTR (copy, i) = XSTR (orig, i);
2726 break;
2728 case '0':
2729 /* Copy this through the wide int field; that's safest. */
2730 X0WINT (copy, i) = X0WINT (orig, i);
2731 break;
2733 default:
2734 abort ();
2737 return copy;
2740 /* Mark ORIG as in use, and return a copy of it if it was already in use.
2741 Recursively does the same for subexpressions. */
2744 copy_rtx_if_shared (orig)
2745 rtx orig;
2747 rtx x = orig;
2748 int i;
2749 enum rtx_code code;
2750 const char *format_ptr;
2751 int copied = 0;
2753 if (x == 0)
2754 return 0;
2756 code = GET_CODE (x);
2758 /* These types may be freely shared. */
2760 switch (code)
2762 case REG:
2763 case QUEUED:
2764 case CONST_INT:
2765 case CONST_DOUBLE:
2766 case CONST_VECTOR:
2767 case SYMBOL_REF:
2768 case CODE_LABEL:
2769 case PC:
2770 case CC0:
2771 case SCRATCH:
2772 /* SCRATCH must be shared because they represent distinct values. */
2773 return x;
2775 case CONST:
2776 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
2777 a LABEL_REF, it isn't sharable. */
2778 if (GET_CODE (XEXP (x, 0)) == PLUS
2779 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
2780 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
2781 return x;
2782 break;
2784 case INSN:
2785 case JUMP_INSN:
2786 case CALL_INSN:
2787 case NOTE:
2788 case BARRIER:
2789 /* The chain of insns is not being copied. */
2790 return x;
2792 case MEM:
2793 /* A MEM is allowed to be shared if its address is constant.
2795 We used to allow sharing of MEMs which referenced
2796 virtual_stack_vars_rtx or virtual_incoming_args_rtx, but
2797 that can lose. instantiate_virtual_regs will not unshare
2798 the MEMs, and combine may change the structure of the address
2799 because it looks safe and profitable in one context, but
2800 in some other context it creates unrecognizable RTL. */
2801 if (CONSTANT_ADDRESS_P (XEXP (x, 0)))
2802 return x;
2804 break;
2806 default:
2807 break;
2810 /* This rtx may not be shared. If it has already been seen,
2811 replace it with a copy of itself. */
2813 if (RTX_FLAG (x, used))
2815 rtx copy;
2817 copy = rtx_alloc (code);
2818 memcpy (copy, x,
2819 (sizeof (*copy) - sizeof (copy->fld)
2820 + sizeof (copy->fld[0]) * GET_RTX_LENGTH (code)));
2821 x = copy;
2822 copied = 1;
2824 RTX_FLAG (x, used) = 1;
2826 /* Now scan the subexpressions recursively.
2827 We can store any replaced subexpressions directly into X
2828 since we know X is not shared! Any vectors in X
2829 must be copied if X was copied. */
2831 format_ptr = GET_RTX_FORMAT (code);
2833 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2835 switch (*format_ptr++)
2837 case 'e':
2838 XEXP (x, i) = copy_rtx_if_shared (XEXP (x, i));
2839 break;
2841 case 'E':
2842 if (XVEC (x, i) != NULL)
2844 int j;
2845 int len = XVECLEN (x, i);
2847 if (copied && len > 0)
2848 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
2849 for (j = 0; j < len; j++)
2850 XVECEXP (x, i, j) = copy_rtx_if_shared (XVECEXP (x, i, j));
2852 break;
2855 return x;
2858 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
2859 to look for shared sub-parts. */
2861 void
2862 reset_used_flags (x)
2863 rtx x;
2865 int i, j;
2866 enum rtx_code code;
2867 const char *format_ptr;
2869 if (x == 0)
2870 return;
2872 code = GET_CODE (x);
2874 /* These types may be freely shared so we needn't do any resetting
2875 for them. */
2877 switch (code)
2879 case REG:
2880 case QUEUED:
2881 case CONST_INT:
2882 case CONST_DOUBLE:
2883 case CONST_VECTOR:
2884 case SYMBOL_REF:
2885 case CODE_LABEL:
2886 case PC:
2887 case CC0:
2888 return;
2890 case INSN:
2891 case JUMP_INSN:
2892 case CALL_INSN:
2893 case NOTE:
2894 case LABEL_REF:
2895 case BARRIER:
2896 /* The chain of insns is not being copied. */
2897 return;
2899 default:
2900 break;
2903 RTX_FLAG (x, used) = 0;
2905 format_ptr = GET_RTX_FORMAT (code);
2906 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2908 switch (*format_ptr++)
2910 case 'e':
2911 reset_used_flags (XEXP (x, i));
2912 break;
2914 case 'E':
2915 for (j = 0; j < XVECLEN (x, i); j++)
2916 reset_used_flags (XVECEXP (x, i, j));
2917 break;
2922 /* Copy X if necessary so that it won't be altered by changes in OTHER.
2923 Return X or the rtx for the pseudo reg the value of X was copied into.
2924 OTHER must be valid as a SET_DEST. */
2927 make_safe_from (x, other)
2928 rtx x, other;
2930 while (1)
2931 switch (GET_CODE (other))
2933 case SUBREG:
2934 other = SUBREG_REG (other);
2935 break;
2936 case STRICT_LOW_PART:
2937 case SIGN_EXTEND:
2938 case ZERO_EXTEND:
2939 other = XEXP (other, 0);
2940 break;
2941 default:
2942 goto done;
2944 done:
2945 if ((GET_CODE (other) == MEM
2946 && ! CONSTANT_P (x)
2947 && GET_CODE (x) != REG
2948 && GET_CODE (x) != SUBREG)
2949 || (GET_CODE (other) == REG
2950 && (REGNO (other) < FIRST_PSEUDO_REGISTER
2951 || reg_mentioned_p (other, x))))
2953 rtx temp = gen_reg_rtx (GET_MODE (x));
2954 emit_move_insn (temp, x);
2955 return temp;
2957 return x;
2960 /* Emission of insns (adding them to the doubly-linked list). */
2962 /* Return the first insn of the current sequence or current function. */
2965 get_insns ()
2967 return first_insn;
2970 /* Specify a new insn as the first in the chain. */
2972 void
2973 set_first_insn (insn)
2974 rtx insn;
2976 if (PREV_INSN (insn) != 0)
2977 abort ();
2978 first_insn = insn;
2981 /* Return the last insn emitted in current sequence or current function. */
2984 get_last_insn ()
2986 return last_insn;
2989 /* Specify a new insn as the last in the chain. */
2991 void
2992 set_last_insn (insn)
2993 rtx insn;
2995 if (NEXT_INSN (insn) != 0)
2996 abort ();
2997 last_insn = insn;
3000 /* Return the last insn emitted, even if it is in a sequence now pushed. */
3003 get_last_insn_anywhere ()
3005 struct sequence_stack *stack;
3006 if (last_insn)
3007 return last_insn;
3008 for (stack = seq_stack; stack; stack = stack->next)
3009 if (stack->last != 0)
3010 return stack->last;
3011 return 0;
3014 /* Return the first nonnote insn emitted in current sequence or current
3015 function. This routine looks inside SEQUENCEs. */
3018 get_first_nonnote_insn ()
3020 rtx insn = first_insn;
3022 while (insn)
3024 insn = next_insn (insn);
3025 if (insn == 0 || GET_CODE (insn) != NOTE)
3026 break;
3029 return insn;
3032 /* Return the last nonnote insn emitted in current sequence or current
3033 function. This routine looks inside SEQUENCEs. */
3036 get_last_nonnote_insn ()
3038 rtx insn = last_insn;
3040 while (insn)
3042 insn = previous_insn (insn);
3043 if (insn == 0 || GET_CODE (insn) != NOTE)
3044 break;
3047 return insn;
3050 /* Return a number larger than any instruction's uid in this function. */
3053 get_max_uid ()
3055 return cur_insn_uid;
3058 /* Renumber instructions so that no instruction UIDs are wasted. */
3060 void
3061 renumber_insns (stream)
3062 FILE *stream;
3064 rtx insn;
3066 /* If we're not supposed to renumber instructions, don't. */
3067 if (!flag_renumber_insns)
3068 return;
3070 /* If there aren't that many instructions, then it's not really
3071 worth renumbering them. */
3072 if (flag_renumber_insns == 1 && get_max_uid () < 25000)
3073 return;
3075 cur_insn_uid = 1;
3077 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3079 if (stream)
3080 fprintf (stream, "Renumbering insn %d to %d\n",
3081 INSN_UID (insn), cur_insn_uid);
3082 INSN_UID (insn) = cur_insn_uid++;
3086 /* Return the next insn. If it is a SEQUENCE, return the first insn
3087 of the sequence. */
3090 next_insn (insn)
3091 rtx insn;
3093 if (insn)
3095 insn = NEXT_INSN (insn);
3096 if (insn && GET_CODE (insn) == INSN
3097 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3098 insn = XVECEXP (PATTERN (insn), 0, 0);
3101 return insn;
3104 /* Return the previous insn. If it is a SEQUENCE, return the last insn
3105 of the sequence. */
3108 previous_insn (insn)
3109 rtx insn;
3111 if (insn)
3113 insn = PREV_INSN (insn);
3114 if (insn && GET_CODE (insn) == INSN
3115 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3116 insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
3119 return insn;
3122 /* Return the next insn after INSN that is not a NOTE. This routine does not
3123 look inside SEQUENCEs. */
3126 next_nonnote_insn (insn)
3127 rtx insn;
3129 while (insn)
3131 insn = NEXT_INSN (insn);
3132 if (insn == 0 || GET_CODE (insn) != NOTE)
3133 break;
3136 return insn;
3139 /* Return the previous insn before INSN that is not a NOTE. This routine does
3140 not look inside SEQUENCEs. */
3143 prev_nonnote_insn (insn)
3144 rtx insn;
3146 while (insn)
3148 insn = PREV_INSN (insn);
3149 if (insn == 0 || GET_CODE (insn) != NOTE)
3150 break;
3153 return insn;
3156 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3157 or 0, if there is none. This routine does not look inside
3158 SEQUENCEs. */
3161 next_real_insn (insn)
3162 rtx insn;
3164 while (insn)
3166 insn = NEXT_INSN (insn);
3167 if (insn == 0 || GET_CODE (insn) == INSN
3168 || GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == JUMP_INSN)
3169 break;
3172 return insn;
3175 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3176 or 0, if there is none. This routine does not look inside
3177 SEQUENCEs. */
3180 prev_real_insn (insn)
3181 rtx insn;
3183 while (insn)
3185 insn = PREV_INSN (insn);
3186 if (insn == 0 || GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
3187 || GET_CODE (insn) == JUMP_INSN)
3188 break;
3191 return insn;
3194 /* Return the last CALL_INSN in the current list, or 0 if there is none.
3195 This routine does not look inside SEQUENCEs. */
3198 last_call_insn ()
3200 rtx insn;
3202 for (insn = get_last_insn ();
3203 insn && GET_CODE (insn) != CALL_INSN;
3204 insn = PREV_INSN (insn))
3207 return insn;
3210 /* Find the next insn after INSN that really does something. This routine
3211 does not look inside SEQUENCEs. Until reload has completed, this is the
3212 same as next_real_insn. */
3215 active_insn_p (insn)
3216 rtx insn;
3218 return (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == JUMP_INSN
3219 || (GET_CODE (insn) == INSN
3220 && (! reload_completed
3221 || (GET_CODE (PATTERN (insn)) != USE
3222 && GET_CODE (PATTERN (insn)) != CLOBBER))));
3226 next_active_insn (insn)
3227 rtx insn;
3229 while (insn)
3231 insn = NEXT_INSN (insn);
3232 if (insn == 0 || active_insn_p (insn))
3233 break;
3236 return insn;
3239 /* Find the last insn before INSN that really does something. This routine
3240 does not look inside SEQUENCEs. Until reload has completed, this is the
3241 same as prev_real_insn. */
3244 prev_active_insn (insn)
3245 rtx insn;
3247 while (insn)
3249 insn = PREV_INSN (insn);
3250 if (insn == 0 || active_insn_p (insn))
3251 break;
3254 return insn;
3257 /* Return the next CODE_LABEL after the insn INSN, or 0 if there is none. */
3260 next_label (insn)
3261 rtx insn;
3263 while (insn)
3265 insn = NEXT_INSN (insn);
3266 if (insn == 0 || GET_CODE (insn) == CODE_LABEL)
3267 break;
3270 return insn;
3273 /* Return the last CODE_LABEL before the insn INSN, or 0 if there is none. */
3276 prev_label (insn)
3277 rtx insn;
3279 while (insn)
3281 insn = PREV_INSN (insn);
3282 if (insn == 0 || GET_CODE (insn) == CODE_LABEL)
3283 break;
3286 return insn;
3289 #ifdef HAVE_cc0
3290 /* INSN uses CC0 and is being moved into a delay slot. Set up REG_CC_SETTER
3291 and REG_CC_USER notes so we can find it. */
3293 void
3294 link_cc0_insns (insn)
3295 rtx insn;
3297 rtx user = next_nonnote_insn (insn);
3299 if (GET_CODE (user) == INSN && GET_CODE (PATTERN (user)) == SEQUENCE)
3300 user = XVECEXP (PATTERN (user), 0, 0);
3302 REG_NOTES (user) = gen_rtx_INSN_LIST (REG_CC_SETTER, insn,
3303 REG_NOTES (user));
3304 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_CC_USER, user, REG_NOTES (insn));
3307 /* Return the next insn that uses CC0 after INSN, which is assumed to
3308 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3309 applied to the result of this function should yield INSN).
3311 Normally, this is simply the next insn. However, if a REG_CC_USER note
3312 is present, it contains the insn that uses CC0.
3314 Return 0 if we can't find the insn. */
3317 next_cc0_user (insn)
3318 rtx insn;
3320 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3322 if (note)
3323 return XEXP (note, 0);
3325 insn = next_nonnote_insn (insn);
3326 if (insn && GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
3327 insn = XVECEXP (PATTERN (insn), 0, 0);
3329 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3330 return insn;
3332 return 0;
3335 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3336 note, it is the previous insn. */
3339 prev_cc0_setter (insn)
3340 rtx insn;
3342 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3344 if (note)
3345 return XEXP (note, 0);
3347 insn = prev_nonnote_insn (insn);
3348 if (! sets_cc0_p (PATTERN (insn)))
3349 abort ();
3351 return insn;
3353 #endif
3355 /* Increment the label uses for all labels present in rtx. */
3357 static void
3358 mark_label_nuses (x)
3359 rtx x;
3361 enum rtx_code code;
3362 int i, j;
3363 const char *fmt;
3365 code = GET_CODE (x);
3366 if (code == LABEL_REF)
3367 LABEL_NUSES (XEXP (x, 0))++;
3369 fmt = GET_RTX_FORMAT (code);
3370 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3372 if (fmt[i] == 'e')
3373 mark_label_nuses (XEXP (x, i));
3374 else if (fmt[i] == 'E')
3375 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3376 mark_label_nuses (XVECEXP (x, i, j));
3381 /* Try splitting insns that can be split for better scheduling.
3382 PAT is the pattern which might split.
3383 TRIAL is the insn providing PAT.
3384 LAST is nonzero if we should return the last insn of the sequence produced.
3386 If this routine succeeds in splitting, it returns the first or last
3387 replacement insn depending on the value of LAST. Otherwise, it
3388 returns TRIAL. If the insn to be returned can be split, it will be. */
3391 try_split (pat, trial, last)
3392 rtx pat, trial;
3393 int last;
3395 rtx before = PREV_INSN (trial);
3396 rtx after = NEXT_INSN (trial);
3397 int has_barrier = 0;
3398 rtx tem;
3399 rtx note, seq;
3400 int probability;
3401 rtx insn_last, insn;
3402 int njumps = 0;
3404 if (any_condjump_p (trial)
3405 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3406 split_branch_probability = INTVAL (XEXP (note, 0));
3407 probability = split_branch_probability;
3409 seq = split_insns (pat, trial);
3411 split_branch_probability = -1;
3413 /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
3414 We may need to handle this specially. */
3415 if (after && GET_CODE (after) == BARRIER)
3417 has_barrier = 1;
3418 after = NEXT_INSN (after);
3421 if (!seq)
3422 return trial;
3424 /* Avoid infinite loop if any insn of the result matches
3425 the original pattern. */
3426 insn_last = seq;
3427 while (1)
3429 if (INSN_P (insn_last)
3430 && rtx_equal_p (PATTERN (insn_last), pat))
3431 return trial;
3432 if (!NEXT_INSN (insn_last))
3433 break;
3434 insn_last = NEXT_INSN (insn_last);
3437 /* Mark labels. */
3438 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3440 if (GET_CODE (insn) == JUMP_INSN)
3442 mark_jump_label (PATTERN (insn), insn, 0);
3443 njumps++;
3444 if (probability != -1
3445 && any_condjump_p (insn)
3446 && !find_reg_note (insn, REG_BR_PROB, 0))
3448 /* We can preserve the REG_BR_PROB notes only if exactly
3449 one jump is created, otherwise the machine description
3450 is responsible for this step using
3451 split_branch_probability variable. */
3452 if (njumps != 1)
3453 abort ();
3454 REG_NOTES (insn)
3455 = gen_rtx_EXPR_LIST (REG_BR_PROB,
3456 GEN_INT (probability),
3457 REG_NOTES (insn));
3462 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3463 in SEQ and copy our CALL_INSN_FUNCTION_USAGE to it. */
3464 if (GET_CODE (trial) == CALL_INSN)
3466 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3467 if (GET_CODE (insn) == CALL_INSN)
3469 CALL_INSN_FUNCTION_USAGE (insn)
3470 = CALL_INSN_FUNCTION_USAGE (trial);
3471 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
3475 /* Copy notes, particularly those related to the CFG. */
3476 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3478 switch (REG_NOTE_KIND (note))
3480 case REG_EH_REGION:
3481 insn = insn_last;
3482 while (insn != NULL_RTX)
3484 if (GET_CODE (insn) == CALL_INSN
3485 || (flag_non_call_exceptions
3486 && may_trap_p (PATTERN (insn))))
3487 REG_NOTES (insn)
3488 = gen_rtx_EXPR_LIST (REG_EH_REGION,
3489 XEXP (note, 0),
3490 REG_NOTES (insn));
3491 insn = PREV_INSN (insn);
3493 break;
3495 case REG_NORETURN:
3496 case REG_SETJMP:
3497 case REG_ALWAYS_RETURN:
3498 insn = insn_last;
3499 while (insn != NULL_RTX)
3501 if (GET_CODE (insn) == CALL_INSN)
3502 REG_NOTES (insn)
3503 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3504 XEXP (note, 0),
3505 REG_NOTES (insn));
3506 insn = PREV_INSN (insn);
3508 break;
3510 case REG_NON_LOCAL_GOTO:
3511 insn = insn_last;
3512 while (insn != NULL_RTX)
3514 if (GET_CODE (insn) == JUMP_INSN)
3515 REG_NOTES (insn)
3516 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3517 XEXP (note, 0),
3518 REG_NOTES (insn));
3519 insn = PREV_INSN (insn);
3521 break;
3523 default:
3524 break;
3528 /* If there are LABELS inside the split insns increment the
3529 usage count so we don't delete the label. */
3530 if (GET_CODE (trial) == INSN)
3532 insn = insn_last;
3533 while (insn != NULL_RTX)
3535 if (GET_CODE (insn) == INSN)
3536 mark_label_nuses (PATTERN (insn));
3538 insn = PREV_INSN (insn);
3542 tem = emit_insn_after_scope (seq, trial, INSN_SCOPE (trial));
3544 delete_insn (trial);
3545 if (has_barrier)
3546 emit_barrier_after (tem);
3548 /* Recursively call try_split for each new insn created; by the
3549 time control returns here that insn will be fully split, so
3550 set LAST and continue from the insn after the one returned.
3551 We can't use next_active_insn here since AFTER may be a note.
3552 Ignore deleted insns, which can be occur if not optimizing. */
3553 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3554 if (! INSN_DELETED_P (tem) && INSN_P (tem))
3555 tem = try_split (PATTERN (tem), tem, 1);
3557 /* Return either the first or the last insn, depending on which was
3558 requested. */
3559 return last
3560 ? (after ? PREV_INSN (after) : last_insn)
3561 : NEXT_INSN (before);
3564 /* Make and return an INSN rtx, initializing all its slots.
3565 Store PATTERN in the pattern slots. */
3568 make_insn_raw (pattern)
3569 rtx pattern;
3571 rtx insn;
3573 insn = rtx_alloc (INSN);
3575 INSN_UID (insn) = cur_insn_uid++;
3576 PATTERN (insn) = pattern;
3577 INSN_CODE (insn) = -1;
3578 LOG_LINKS (insn) = NULL;
3579 REG_NOTES (insn) = NULL;
3580 INSN_SCOPE (insn) = NULL;
3581 BLOCK_FOR_INSN (insn) = NULL;
3583 #ifdef ENABLE_RTL_CHECKING
3584 if (insn
3585 && INSN_P (insn)
3586 && (returnjump_p (insn)
3587 || (GET_CODE (insn) == SET
3588 && SET_DEST (insn) == pc_rtx)))
3590 warning ("ICE: emit_insn used where emit_jump_insn needed:\n");
3591 debug_rtx (insn);
3593 #endif
3595 return insn;
3598 /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
3600 static rtx
3601 make_jump_insn_raw (pattern)
3602 rtx pattern;
3604 rtx insn;
3606 insn = rtx_alloc (JUMP_INSN);
3607 INSN_UID (insn) = cur_insn_uid++;
3609 PATTERN (insn) = pattern;
3610 INSN_CODE (insn) = -1;
3611 LOG_LINKS (insn) = NULL;
3612 REG_NOTES (insn) = NULL;
3613 JUMP_LABEL (insn) = NULL;
3614 INSN_SCOPE (insn) = NULL;
3615 BLOCK_FOR_INSN (insn) = NULL;
3617 return insn;
3620 /* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
3622 static rtx
3623 make_call_insn_raw (pattern)
3624 rtx pattern;
3626 rtx insn;
3628 insn = rtx_alloc (CALL_INSN);
3629 INSN_UID (insn) = cur_insn_uid++;
3631 PATTERN (insn) = pattern;
3632 INSN_CODE (insn) = -1;
3633 LOG_LINKS (insn) = NULL;
3634 REG_NOTES (insn) = NULL;
3635 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
3636 INSN_SCOPE (insn) = NULL;
3637 BLOCK_FOR_INSN (insn) = NULL;
3639 return insn;
3642 /* Add INSN to the end of the doubly-linked list.
3643 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
3645 void
3646 add_insn (insn)
3647 rtx insn;
3649 PREV_INSN (insn) = last_insn;
3650 NEXT_INSN (insn) = 0;
3652 if (NULL != last_insn)
3653 NEXT_INSN (last_insn) = insn;
3655 if (NULL == first_insn)
3656 first_insn = insn;
3658 last_insn = insn;
3661 /* Add INSN into the doubly-linked list after insn AFTER. This and
3662 the next should be the only functions called to insert an insn once
3663 delay slots have been filled since only they know how to update a
3664 SEQUENCE. */
3666 void
3667 add_insn_after (insn, after)
3668 rtx insn, after;
3670 rtx next = NEXT_INSN (after);
3671 basic_block bb;
3673 if (optimize && INSN_DELETED_P (after))
3674 abort ();
3676 NEXT_INSN (insn) = next;
3677 PREV_INSN (insn) = after;
3679 if (next)
3681 PREV_INSN (next) = insn;
3682 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
3683 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
3685 else if (last_insn == after)
3686 last_insn = insn;
3687 else
3689 struct sequence_stack *stack = seq_stack;
3690 /* Scan all pending sequences too. */
3691 for (; stack; stack = stack->next)
3692 if (after == stack->last)
3694 stack->last = insn;
3695 break;
3698 if (stack == 0)
3699 abort ();
3702 if (GET_CODE (after) != BARRIER
3703 && GET_CODE (insn) != BARRIER
3704 && (bb = BLOCK_FOR_INSN (after)))
3706 set_block_for_insn (insn, bb);
3707 if (INSN_P (insn))
3708 bb->flags |= BB_DIRTY;
3709 /* Should not happen as first in the BB is always
3710 either NOTE or LABEL. */
3711 if (bb->end == after
3712 /* Avoid clobbering of structure when creating new BB. */
3713 && GET_CODE (insn) != BARRIER
3714 && (GET_CODE (insn) != NOTE
3715 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
3716 bb->end = insn;
3719 NEXT_INSN (after) = insn;
3720 if (GET_CODE (after) == INSN && GET_CODE (PATTERN (after)) == SEQUENCE)
3722 rtx sequence = PATTERN (after);
3723 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3727 /* Add INSN into the doubly-linked list before insn BEFORE. This and
3728 the previous should be the only functions called to insert an insn once
3729 delay slots have been filled since only they know how to update a
3730 SEQUENCE. */
3732 void
3733 add_insn_before (insn, before)
3734 rtx insn, before;
3736 rtx prev = PREV_INSN (before);
3737 basic_block bb;
3739 if (optimize && INSN_DELETED_P (before))
3740 abort ();
3742 PREV_INSN (insn) = prev;
3743 NEXT_INSN (insn) = before;
3745 if (prev)
3747 NEXT_INSN (prev) = insn;
3748 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
3750 rtx sequence = PATTERN (prev);
3751 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3754 else if (first_insn == before)
3755 first_insn = insn;
3756 else
3758 struct sequence_stack *stack = seq_stack;
3759 /* Scan all pending sequences too. */
3760 for (; stack; stack = stack->next)
3761 if (before == stack->first)
3763 stack->first = insn;
3764 break;
3767 if (stack == 0)
3768 abort ();
3771 if (GET_CODE (before) != BARRIER
3772 && GET_CODE (insn) != BARRIER
3773 && (bb = BLOCK_FOR_INSN (before)))
3775 set_block_for_insn (insn, bb);
3776 if (INSN_P (insn))
3777 bb->flags |= BB_DIRTY;
3778 /* Should not happen as first in the BB is always
3779 either NOTE or LABEl. */
3780 if (bb->head == insn
3781 /* Avoid clobbering of structure when creating new BB. */
3782 && GET_CODE (insn) != BARRIER
3783 && (GET_CODE (insn) != NOTE
3784 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
3785 abort ();
3788 PREV_INSN (before) = insn;
3789 if (GET_CODE (before) == INSN && GET_CODE (PATTERN (before)) == SEQUENCE)
3790 PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
3793 /* Remove an insn from its doubly-linked list. This function knows how
3794 to handle sequences. */
3795 void
3796 remove_insn (insn)
3797 rtx insn;
3799 rtx next = NEXT_INSN (insn);
3800 rtx prev = PREV_INSN (insn);
3801 basic_block bb;
3803 if (prev)
3805 NEXT_INSN (prev) = next;
3806 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
3808 rtx sequence = PATTERN (prev);
3809 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
3812 else if (first_insn == insn)
3813 first_insn = next;
3814 else
3816 struct sequence_stack *stack = seq_stack;
3817 /* Scan all pending sequences too. */
3818 for (; stack; stack = stack->next)
3819 if (insn == stack->first)
3821 stack->first = next;
3822 break;
3825 if (stack == 0)
3826 abort ();
3829 if (next)
3831 PREV_INSN (next) = prev;
3832 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
3833 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
3835 else if (last_insn == insn)
3836 last_insn = prev;
3837 else
3839 struct sequence_stack *stack = seq_stack;
3840 /* Scan all pending sequences too. */
3841 for (; stack; stack = stack->next)
3842 if (insn == stack->last)
3844 stack->last = prev;
3845 break;
3848 if (stack == 0)
3849 abort ();
3851 if (GET_CODE (insn) != BARRIER
3852 && (bb = BLOCK_FOR_INSN (insn)))
3854 if (INSN_P (insn))
3855 bb->flags |= BB_DIRTY;
3856 if (bb->head == insn)
3858 /* Never ever delete the basic block note without deleting whole
3859 basic block. */
3860 if (GET_CODE (insn) == NOTE)
3861 abort ();
3862 bb->head = next;
3864 if (bb->end == insn)
3865 bb->end = prev;
3869 /* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
3871 void
3872 add_function_usage_to (call_insn, call_fusage)
3873 rtx call_insn, call_fusage;
3875 if (! call_insn || GET_CODE (call_insn) != CALL_INSN)
3876 abort ();
3878 /* Put the register usage information on the CALL. If there is already
3879 some usage information, put ours at the end. */
3880 if (CALL_INSN_FUNCTION_USAGE (call_insn))
3882 rtx link;
3884 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
3885 link = XEXP (link, 1))
3888 XEXP (link, 1) = call_fusage;
3890 else
3891 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
3894 /* Delete all insns made since FROM.
3895 FROM becomes the new last instruction. */
3897 void
3898 delete_insns_since (from)
3899 rtx from;
3901 if (from == 0)
3902 first_insn = 0;
3903 else
3904 NEXT_INSN (from) = 0;
3905 last_insn = from;
3908 /* This function is deprecated, please use sequences instead.
3910 Move a consecutive bunch of insns to a different place in the chain.
3911 The insns to be moved are those between FROM and TO.
3912 They are moved to a new position after the insn AFTER.
3913 AFTER must not be FROM or TO or any insn in between.
3915 This function does not know about SEQUENCEs and hence should not be
3916 called after delay-slot filling has been done. */
3918 void
3919 reorder_insns_nobb (from, to, after)
3920 rtx from, to, after;
3922 /* Splice this bunch out of where it is now. */
3923 if (PREV_INSN (from))
3924 NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
3925 if (NEXT_INSN (to))
3926 PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
3927 if (last_insn == to)
3928 last_insn = PREV_INSN (from);
3929 if (first_insn == from)
3930 first_insn = NEXT_INSN (to);
3932 /* Make the new neighbors point to it and it to them. */
3933 if (NEXT_INSN (after))
3934 PREV_INSN (NEXT_INSN (after)) = to;
3936 NEXT_INSN (to) = NEXT_INSN (after);
3937 PREV_INSN (from) = after;
3938 NEXT_INSN (after) = from;
3939 if (after == last_insn)
3940 last_insn = to;
3943 /* Same as function above, but take care to update BB boundaries. */
3944 void
3945 reorder_insns (from, to, after)
3946 rtx from, to, after;
3948 rtx prev = PREV_INSN (from);
3949 basic_block bb, bb2;
3951 reorder_insns_nobb (from, to, after);
3953 if (GET_CODE (after) != BARRIER
3954 && (bb = BLOCK_FOR_INSN (after)))
3956 rtx x;
3957 bb->flags |= BB_DIRTY;
3959 if (GET_CODE (from) != BARRIER
3960 && (bb2 = BLOCK_FOR_INSN (from)))
3962 if (bb2->end == to)
3963 bb2->end = prev;
3964 bb2->flags |= BB_DIRTY;
3967 if (bb->end == after)
3968 bb->end = to;
3970 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
3971 set_block_for_insn (x, bb);
3975 /* Return the line note insn preceding INSN. */
3977 static rtx
3978 find_line_note (insn)
3979 rtx insn;
3981 if (no_line_numbers)
3982 return 0;
3984 for (; insn; insn = PREV_INSN (insn))
3985 if (GET_CODE (insn) == NOTE
3986 && NOTE_LINE_NUMBER (insn) >= 0)
3987 break;
3989 return insn;
3992 /* Like reorder_insns, but inserts line notes to preserve the line numbers
3993 of the moved insns when debugging. This may insert a note between AFTER
3994 and FROM, and another one after TO. */
3996 void
3997 reorder_insns_with_line_notes (from, to, after)
3998 rtx from, to, after;
4000 rtx from_line = find_line_note (from);
4001 rtx after_line = find_line_note (after);
4003 reorder_insns (from, to, after);
4005 if (from_line == after_line)
4006 return;
4008 if (from_line)
4009 emit_line_note_after (NOTE_SOURCE_FILE (from_line),
4010 NOTE_LINE_NUMBER (from_line),
4011 after);
4012 if (after_line)
4013 emit_line_note_after (NOTE_SOURCE_FILE (after_line),
4014 NOTE_LINE_NUMBER (after_line),
4015 to);
4018 /* Remove unnecessary notes from the instruction stream. */
4020 void
4021 remove_unnecessary_notes ()
4023 rtx block_stack = NULL_RTX;
4024 rtx eh_stack = NULL_RTX;
4025 rtx insn;
4026 rtx next;
4027 rtx tmp;
4029 /* We must not remove the first instruction in the function because
4030 the compiler depends on the first instruction being a note. */
4031 for (insn = NEXT_INSN (get_insns ()); insn; insn = next)
4033 /* Remember what's next. */
4034 next = NEXT_INSN (insn);
4036 /* We're only interested in notes. */
4037 if (GET_CODE (insn) != NOTE)
4038 continue;
4040 switch (NOTE_LINE_NUMBER (insn))
4042 case NOTE_INSN_DELETED:
4043 case NOTE_INSN_LOOP_END_TOP_COND:
4044 remove_insn (insn);
4045 break;
4047 case NOTE_INSN_EH_REGION_BEG:
4048 eh_stack = alloc_INSN_LIST (insn, eh_stack);
4049 break;
4051 case NOTE_INSN_EH_REGION_END:
4052 /* Too many end notes. */
4053 if (eh_stack == NULL_RTX)
4054 abort ();
4055 /* Mismatched nesting. */
4056 if (NOTE_EH_HANDLER (XEXP (eh_stack, 0)) != NOTE_EH_HANDLER (insn))
4057 abort ();
4058 tmp = eh_stack;
4059 eh_stack = XEXP (eh_stack, 1);
4060 free_INSN_LIST_node (tmp);
4061 break;
4063 case NOTE_INSN_BLOCK_BEG:
4064 /* By now, all notes indicating lexical blocks should have
4065 NOTE_BLOCK filled in. */
4066 if (NOTE_BLOCK (insn) == NULL_TREE)
4067 abort ();
4068 block_stack = alloc_INSN_LIST (insn, block_stack);
4069 break;
4071 case NOTE_INSN_BLOCK_END:
4072 /* Too many end notes. */
4073 if (block_stack == NULL_RTX)
4074 abort ();
4075 /* Mismatched nesting. */
4076 if (NOTE_BLOCK (XEXP (block_stack, 0)) != NOTE_BLOCK (insn))
4077 abort ();
4078 tmp = block_stack;
4079 block_stack = XEXP (block_stack, 1);
4080 free_INSN_LIST_node (tmp);
4082 /* Scan back to see if there are any non-note instructions
4083 between INSN and the beginning of this block. If not,
4084 then there is no PC range in the generated code that will
4085 actually be in this block, so there's no point in
4086 remembering the existence of the block. */
4087 for (tmp = PREV_INSN (insn); tmp; tmp = PREV_INSN (tmp))
4089 /* This block contains a real instruction. Note that we
4090 don't include labels; if the only thing in the block
4091 is a label, then there are still no PC values that
4092 lie within the block. */
4093 if (INSN_P (tmp))
4094 break;
4096 /* We're only interested in NOTEs. */
4097 if (GET_CODE (tmp) != NOTE)
4098 continue;
4100 if (NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BLOCK_BEG)
4102 /* We just verified that this BLOCK matches us with
4103 the block_stack check above. Never delete the
4104 BLOCK for the outermost scope of the function; we
4105 can refer to names from that scope even if the
4106 block notes are messed up. */
4107 if (! is_body_block (NOTE_BLOCK (insn))
4108 && (*debug_hooks->ignore_block) (NOTE_BLOCK (insn)))
4110 remove_insn (tmp);
4111 remove_insn (insn);
4113 break;
4115 else if (NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BLOCK_END)
4116 /* There's a nested block. We need to leave the
4117 current block in place since otherwise the debugger
4118 wouldn't be able to show symbols from our block in
4119 the nested block. */
4120 break;
4125 /* Too many begin notes. */
4126 if (block_stack || eh_stack)
4127 abort ();
4131 /* Emit insn(s) of given code and pattern
4132 at a specified place within the doubly-linked list.
4134 All of the emit_foo global entry points accept an object
4135 X which is either an insn list or a PATTERN of a single
4136 instruction.
4138 There are thus a few canonical ways to generate code and
4139 emit it at a specific place in the instruction stream. For
4140 example, consider the instruction named SPOT and the fact that
4141 we would like to emit some instructions before SPOT. We might
4142 do it like this:
4144 start_sequence ();
4145 ... emit the new instructions ...
4146 insns_head = get_insns ();
4147 end_sequence ();
4149 emit_insn_before (insns_head, SPOT);
4151 It used to be common to generate SEQUENCE rtl instead, but that
4152 is a relic of the past which no longer occurs. The reason is that
4153 SEQUENCE rtl results in much fragmented RTL memory since the SEQUENCE
4154 generated would almost certainly die right after it was created. */
4156 /* Make X be output before the instruction BEFORE. */
4159 emit_insn_before (x, before)
4160 rtx x, before;
4162 rtx last = before;
4163 rtx insn;
4165 #ifdef ENABLE_RTL_CHECKING
4166 if (before == NULL_RTX)
4167 abort ();
4168 #endif
4170 if (x == NULL_RTX)
4171 return last;
4173 switch (GET_CODE (x))
4175 case INSN:
4176 case JUMP_INSN:
4177 case CALL_INSN:
4178 case CODE_LABEL:
4179 case BARRIER:
4180 case NOTE:
4181 insn = x;
4182 while (insn)
4184 rtx next = NEXT_INSN (insn);
4185 add_insn_before (insn, before);
4186 last = insn;
4187 insn = next;
4189 break;
4191 #ifdef ENABLE_RTL_CHECKING
4192 case SEQUENCE:
4193 abort ();
4194 break;
4195 #endif
4197 default:
4198 last = make_insn_raw (x);
4199 add_insn_before (last, before);
4200 break;
4203 return last;
4206 /* Make an instruction with body X and code JUMP_INSN
4207 and output it before the instruction BEFORE. */
4210 emit_jump_insn_before (x, before)
4211 rtx x, before;
4213 rtx insn, last = NULL_RTX;
4215 #ifdef ENABLE_RTL_CHECKING
4216 if (before == NULL_RTX)
4217 abort ();
4218 #endif
4220 switch (GET_CODE (x))
4222 case INSN:
4223 case JUMP_INSN:
4224 case CALL_INSN:
4225 case CODE_LABEL:
4226 case BARRIER:
4227 case NOTE:
4228 insn = x;
4229 while (insn)
4231 rtx next = NEXT_INSN (insn);
4232 add_insn_before (insn, before);
4233 last = insn;
4234 insn = next;
4236 break;
4238 #ifdef ENABLE_RTL_CHECKING
4239 case SEQUENCE:
4240 abort ();
4241 break;
4242 #endif
4244 default:
4245 last = make_jump_insn_raw (x);
4246 add_insn_before (last, before);
4247 break;
4250 return last;
4253 /* Make an instruction with body X and code CALL_INSN
4254 and output it before the instruction BEFORE. */
4257 emit_call_insn_before (x, before)
4258 rtx x, before;
4260 rtx last = NULL_RTX, insn;
4262 #ifdef ENABLE_RTL_CHECKING
4263 if (before == NULL_RTX)
4264 abort ();
4265 #endif
4267 switch (GET_CODE (x))
4269 case INSN:
4270 case JUMP_INSN:
4271 case CALL_INSN:
4272 case CODE_LABEL:
4273 case BARRIER:
4274 case NOTE:
4275 insn = x;
4276 while (insn)
4278 rtx next = NEXT_INSN (insn);
4279 add_insn_before (insn, before);
4280 last = insn;
4281 insn = next;
4283 break;
4285 #ifdef ENABLE_RTL_CHECKING
4286 case SEQUENCE:
4287 abort ();
4288 break;
4289 #endif
4291 default:
4292 last = make_call_insn_raw (x);
4293 add_insn_before (last, before);
4294 break;
4297 return last;
4300 /* Make an insn of code BARRIER
4301 and output it before the insn BEFORE. */
4304 emit_barrier_before (before)
4305 rtx before;
4307 rtx insn = rtx_alloc (BARRIER);
4309 INSN_UID (insn) = cur_insn_uid++;
4311 add_insn_before (insn, before);
4312 return insn;
4315 /* Emit the label LABEL before the insn BEFORE. */
4318 emit_label_before (label, before)
4319 rtx label, before;
4321 /* This can be called twice for the same label as a result of the
4322 confusion that follows a syntax error! So make it harmless. */
4323 if (INSN_UID (label) == 0)
4325 INSN_UID (label) = cur_insn_uid++;
4326 add_insn_before (label, before);
4329 return label;
4332 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4335 emit_note_before (subtype, before)
4336 int subtype;
4337 rtx before;
4339 rtx note = rtx_alloc (NOTE);
4340 INSN_UID (note) = cur_insn_uid++;
4341 NOTE_SOURCE_FILE (note) = 0;
4342 NOTE_LINE_NUMBER (note) = subtype;
4343 BLOCK_FOR_INSN (note) = NULL;
4345 add_insn_before (note, before);
4346 return note;
4349 /* Helper for emit_insn_after, handles lists of instructions
4350 efficiently. */
4352 static rtx emit_insn_after_1 PARAMS ((rtx, rtx));
4354 static rtx
4355 emit_insn_after_1 (first, after)
4356 rtx first, after;
4358 rtx last;
4359 rtx after_after;
4360 basic_block bb;
4362 if (GET_CODE (after) != BARRIER
4363 && (bb = BLOCK_FOR_INSN (after)))
4365 bb->flags |= BB_DIRTY;
4366 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4367 if (GET_CODE (last) != BARRIER)
4368 set_block_for_insn (last, bb);
4369 if (GET_CODE (last) != BARRIER)
4370 set_block_for_insn (last, bb);
4371 if (bb->end == after)
4372 bb->end = last;
4374 else
4375 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4376 continue;
4378 after_after = NEXT_INSN (after);
4380 NEXT_INSN (after) = first;
4381 PREV_INSN (first) = after;
4382 NEXT_INSN (last) = after_after;
4383 if (after_after)
4384 PREV_INSN (after_after) = last;
4386 if (after == last_insn)
4387 last_insn = last;
4388 return last;
4391 /* Make X be output after the insn AFTER. */
4394 emit_insn_after (x, after)
4395 rtx x, after;
4397 rtx last = after;
4399 #ifdef ENABLE_RTL_CHECKING
4400 if (after == NULL_RTX)
4401 abort ();
4402 #endif
4404 if (x == NULL_RTX)
4405 return last;
4407 switch (GET_CODE (x))
4409 case INSN:
4410 case JUMP_INSN:
4411 case CALL_INSN:
4412 case CODE_LABEL:
4413 case BARRIER:
4414 case NOTE:
4415 last = emit_insn_after_1 (x, after);
4416 break;
4418 #ifdef ENABLE_RTL_CHECKING
4419 case SEQUENCE:
4420 abort ();
4421 break;
4422 #endif
4424 default:
4425 last = make_insn_raw (x);
4426 add_insn_after (last, after);
4427 break;
4430 return last;
4433 /* Similar to emit_insn_after, except that line notes are to be inserted so
4434 as to act as if this insn were at FROM. */
4436 void
4437 emit_insn_after_with_line_notes (x, after, from)
4438 rtx x, after, from;
4440 rtx from_line = find_line_note (from);
4441 rtx after_line = find_line_note (after);
4442 rtx insn = emit_insn_after (x, after);
4444 if (from_line)
4445 emit_line_note_after (NOTE_SOURCE_FILE (from_line),
4446 NOTE_LINE_NUMBER (from_line),
4447 after);
4449 if (after_line)
4450 emit_line_note_after (NOTE_SOURCE_FILE (after_line),
4451 NOTE_LINE_NUMBER (after_line),
4452 insn);
4455 /* Make an insn of code JUMP_INSN with body X
4456 and output it after the insn AFTER. */
4459 emit_jump_insn_after (x, after)
4460 rtx x, after;
4462 rtx last;
4464 #ifdef ENABLE_RTL_CHECKING
4465 if (after == NULL_RTX)
4466 abort ();
4467 #endif
4469 switch (GET_CODE (x))
4471 case INSN:
4472 case JUMP_INSN:
4473 case CALL_INSN:
4474 case CODE_LABEL:
4475 case BARRIER:
4476 case NOTE:
4477 last = emit_insn_after_1 (x, after);
4478 break;
4480 #ifdef ENABLE_RTL_CHECKING
4481 case SEQUENCE:
4482 abort ();
4483 break;
4484 #endif
4486 default:
4487 last = make_jump_insn_raw (x);
4488 add_insn_after (last, after);
4489 break;
4492 return last;
4495 /* Make an instruction with body X and code CALL_INSN
4496 and output it after the instruction AFTER. */
4499 emit_call_insn_after (x, after)
4500 rtx x, after;
4502 rtx last;
4504 #ifdef ENABLE_RTL_CHECKING
4505 if (after == NULL_RTX)
4506 abort ();
4507 #endif
4509 switch (GET_CODE (x))
4511 case INSN:
4512 case JUMP_INSN:
4513 case CALL_INSN:
4514 case CODE_LABEL:
4515 case BARRIER:
4516 case NOTE:
4517 last = emit_insn_after_1 (x, after);
4518 break;
4520 #ifdef ENABLE_RTL_CHECKING
4521 case SEQUENCE:
4522 abort ();
4523 break;
4524 #endif
4526 default:
4527 last = make_call_insn_raw (x);
4528 add_insn_after (last, after);
4529 break;
4532 return last;
4535 /* Make an insn of code BARRIER
4536 and output it after the insn AFTER. */
4539 emit_barrier_after (after)
4540 rtx after;
4542 rtx insn = rtx_alloc (BARRIER);
4544 INSN_UID (insn) = cur_insn_uid++;
4546 add_insn_after (insn, after);
4547 return insn;
4550 /* Emit the label LABEL after the insn AFTER. */
4553 emit_label_after (label, after)
4554 rtx label, after;
4556 /* This can be called twice for the same label
4557 as a result of the confusion that follows a syntax error!
4558 So make it harmless. */
4559 if (INSN_UID (label) == 0)
4561 INSN_UID (label) = cur_insn_uid++;
4562 add_insn_after (label, after);
4565 return label;
4568 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
4571 emit_note_after (subtype, after)
4572 int subtype;
4573 rtx after;
4575 rtx note = rtx_alloc (NOTE);
4576 INSN_UID (note) = cur_insn_uid++;
4577 NOTE_SOURCE_FILE (note) = 0;
4578 NOTE_LINE_NUMBER (note) = subtype;
4579 BLOCK_FOR_INSN (note) = NULL;
4580 add_insn_after (note, after);
4581 return note;
4584 /* Emit a line note for FILE and LINE after the insn AFTER. */
4587 emit_line_note_after (file, line, after)
4588 const char *file;
4589 int line;
4590 rtx after;
4592 rtx note;
4594 if (no_line_numbers && line > 0)
4596 cur_insn_uid++;
4597 return 0;
4600 note = rtx_alloc (NOTE);
4601 INSN_UID (note) = cur_insn_uid++;
4602 NOTE_SOURCE_FILE (note) = file;
4603 NOTE_LINE_NUMBER (note) = line;
4604 BLOCK_FOR_INSN (note) = NULL;
4605 add_insn_after (note, after);
4606 return note;
4609 /* Like emit_insn_after, but set INSN_SCOPE according to SCOPE. */
4611 emit_insn_after_scope (pattern, after, scope)
4612 rtx pattern, after;
4613 tree scope;
4615 rtx last = emit_insn_after (pattern, after);
4617 after = NEXT_INSN (after);
4618 while (1)
4620 if (active_insn_p (after))
4621 INSN_SCOPE (after) = scope;
4622 if (after == last)
4623 break;
4624 after = NEXT_INSN (after);
4626 return last;
4629 /* Like emit_jump_insn_after, but set INSN_SCOPE according to SCOPE. */
4631 emit_jump_insn_after_scope (pattern, after, scope)
4632 rtx pattern, after;
4633 tree scope;
4635 rtx last = emit_jump_insn_after (pattern, after);
4637 after = NEXT_INSN (after);
4638 while (1)
4640 if (active_insn_p (after))
4641 INSN_SCOPE (after) = scope;
4642 if (after == last)
4643 break;
4644 after = NEXT_INSN (after);
4646 return last;
4649 /* Like emit_call_insn_after, but set INSN_SCOPE according to SCOPE. */
4651 emit_call_insn_after_scope (pattern, after, scope)
4652 rtx pattern, after;
4653 tree scope;
4655 rtx last = emit_call_insn_after (pattern, after);
4657 after = NEXT_INSN (after);
4658 while (1)
4660 if (active_insn_p (after))
4661 INSN_SCOPE (after) = scope;
4662 if (after == last)
4663 break;
4664 after = NEXT_INSN (after);
4666 return last;
4669 /* Like emit_insn_before, but set INSN_SCOPE according to SCOPE. */
4671 emit_insn_before_scope (pattern, before, scope)
4672 rtx pattern, before;
4673 tree scope;
4675 rtx first = PREV_INSN (before);
4676 rtx last = emit_insn_before (pattern, before);
4678 first = NEXT_INSN (first);
4679 while (1)
4681 if (active_insn_p (first))
4682 INSN_SCOPE (first) = scope;
4683 if (first == last)
4684 break;
4685 first = NEXT_INSN (first);
4687 return last;
4690 /* Take X and emit it at the end of the doubly-linked
4691 INSN list.
4693 Returns the last insn emitted. */
4696 emit_insn (x)
4697 rtx x;
4699 rtx last = last_insn;
4700 rtx insn;
4702 if (x == NULL_RTX)
4703 return last;
4705 switch (GET_CODE (x))
4707 case INSN:
4708 case JUMP_INSN:
4709 case CALL_INSN:
4710 case CODE_LABEL:
4711 case BARRIER:
4712 case NOTE:
4713 insn = x;
4714 while (insn)
4716 rtx next = NEXT_INSN (insn);
4717 add_insn (insn);
4718 last = insn;
4719 insn = next;
4721 break;
4723 #ifdef ENABLE_RTL_CHECKING
4724 case SEQUENCE:
4725 abort ();
4726 break;
4727 #endif
4729 default:
4730 last = make_insn_raw (x);
4731 add_insn (last);
4732 break;
4735 return last;
4738 /* Make an insn of code JUMP_INSN with pattern X
4739 and add it to the end of the doubly-linked list. */
4742 emit_jump_insn (x)
4743 rtx x;
4745 rtx last = NULL_RTX, insn;
4747 switch (GET_CODE (x))
4749 case INSN:
4750 case JUMP_INSN:
4751 case CALL_INSN:
4752 case CODE_LABEL:
4753 case BARRIER:
4754 case NOTE:
4755 insn = x;
4756 while (insn)
4758 rtx next = NEXT_INSN (insn);
4759 add_insn (insn);
4760 last = insn;
4761 insn = next;
4763 break;
4765 #ifdef ENABLE_RTL_CHECKING
4766 case SEQUENCE:
4767 abort ();
4768 break;
4769 #endif
4771 default:
4772 last = make_jump_insn_raw (x);
4773 add_insn (last);
4774 break;
4777 return last;
4780 /* Make an insn of code CALL_INSN with pattern X
4781 and add it to the end of the doubly-linked list. */
4784 emit_call_insn (x)
4785 rtx x;
4787 rtx insn;
4789 switch (GET_CODE (x))
4791 case INSN:
4792 case JUMP_INSN:
4793 case CALL_INSN:
4794 case CODE_LABEL:
4795 case BARRIER:
4796 case NOTE:
4797 insn = emit_insn (x);
4798 break;
4800 #ifdef ENABLE_RTL_CHECKING
4801 case SEQUENCE:
4802 abort ();
4803 break;
4804 #endif
4806 default:
4807 insn = make_call_insn_raw (x);
4808 add_insn (insn);
4809 break;
4812 return insn;
4815 /* Add the label LABEL to the end of the doubly-linked list. */
4818 emit_label (label)
4819 rtx label;
4821 /* This can be called twice for the same label
4822 as a result of the confusion that follows a syntax error!
4823 So make it harmless. */
4824 if (INSN_UID (label) == 0)
4826 INSN_UID (label) = cur_insn_uid++;
4827 add_insn (label);
4829 return label;
4832 /* Make an insn of code BARRIER
4833 and add it to the end of the doubly-linked list. */
4836 emit_barrier ()
4838 rtx barrier = rtx_alloc (BARRIER);
4839 INSN_UID (barrier) = cur_insn_uid++;
4840 add_insn (barrier);
4841 return barrier;
4844 /* Make an insn of code NOTE
4845 with data-fields specified by FILE and LINE
4846 and add it to the end of the doubly-linked list,
4847 but only if line-numbers are desired for debugging info. */
4850 emit_line_note (file, line)
4851 const char *file;
4852 int line;
4854 set_file_and_line_for_stmt (file, line);
4856 #if 0
4857 if (no_line_numbers)
4858 return 0;
4859 #endif
4861 return emit_note (file, line);
4864 /* Make an insn of code NOTE
4865 with data-fields specified by FILE and LINE
4866 and add it to the end of the doubly-linked list.
4867 If it is a line-number NOTE, omit it if it matches the previous one. */
4870 emit_note (file, line)
4871 const char *file;
4872 int line;
4874 rtx note;
4876 if (line > 0)
4878 if (file && last_filename && !strcmp (file, last_filename)
4879 && line == last_linenum)
4880 return 0;
4881 last_filename = file;
4882 last_linenum = line;
4885 if (no_line_numbers && line > 0)
4887 cur_insn_uid++;
4888 return 0;
4891 note = rtx_alloc (NOTE);
4892 INSN_UID (note) = cur_insn_uid++;
4893 NOTE_SOURCE_FILE (note) = file;
4894 NOTE_LINE_NUMBER (note) = line;
4895 BLOCK_FOR_INSN (note) = NULL;
4896 add_insn (note);
4897 return note;
4900 /* Emit a NOTE, and don't omit it even if LINE is the previous note. */
4903 emit_line_note_force (file, line)
4904 const char *file;
4905 int line;
4907 last_linenum = -1;
4908 return emit_line_note (file, line);
4911 /* Cause next statement to emit a line note even if the line number
4912 has not changed. This is used at the beginning of a function. */
4914 void
4915 force_next_line_note ()
4917 last_linenum = -1;
4920 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
4921 note of this type already exists, remove it first. */
4924 set_unique_reg_note (insn, kind, datum)
4925 rtx insn;
4926 enum reg_note kind;
4927 rtx datum;
4929 rtx note = find_reg_note (insn, kind, NULL_RTX);
4931 switch (kind)
4933 case REG_EQUAL:
4934 case REG_EQUIV:
4935 /* Don't add REG_EQUAL/REG_EQUIV notes if the insn
4936 has multiple sets (some callers assume single_set
4937 means the insn only has one set, when in fact it
4938 means the insn only has one * useful * set). */
4939 if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
4941 if (note)
4942 abort ();
4943 return NULL_RTX;
4946 /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
4947 It serves no useful purpose and breaks eliminate_regs. */
4948 if (GET_CODE (datum) == ASM_OPERANDS)
4949 return NULL_RTX;
4950 break;
4952 default:
4953 break;
4956 if (note)
4958 XEXP (note, 0) = datum;
4959 return note;
4962 REG_NOTES (insn) = gen_rtx_EXPR_LIST (kind, datum, REG_NOTES (insn));
4963 return REG_NOTES (insn);
4966 /* Return an indication of which type of insn should have X as a body.
4967 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
4969 enum rtx_code
4970 classify_insn (x)
4971 rtx x;
4973 if (GET_CODE (x) == CODE_LABEL)
4974 return CODE_LABEL;
4975 if (GET_CODE (x) == CALL)
4976 return CALL_INSN;
4977 if (GET_CODE (x) == RETURN)
4978 return JUMP_INSN;
4979 if (GET_CODE (x) == SET)
4981 if (SET_DEST (x) == pc_rtx)
4982 return JUMP_INSN;
4983 else if (GET_CODE (SET_SRC (x)) == CALL)
4984 return CALL_INSN;
4985 else
4986 return INSN;
4988 if (GET_CODE (x) == PARALLEL)
4990 int j;
4991 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
4992 if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
4993 return CALL_INSN;
4994 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
4995 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
4996 return JUMP_INSN;
4997 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
4998 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
4999 return CALL_INSN;
5001 return INSN;
5004 /* Emit the rtl pattern X as an appropriate kind of insn.
5005 If X is a label, it is simply added into the insn chain. */
5008 emit (x)
5009 rtx x;
5011 enum rtx_code code = classify_insn (x);
5013 if (code == CODE_LABEL)
5014 return emit_label (x);
5015 else if (code == INSN)
5016 return emit_insn (x);
5017 else if (code == JUMP_INSN)
5019 rtx insn = emit_jump_insn (x);
5020 if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
5021 return emit_barrier ();
5022 return insn;
5024 else if (code == CALL_INSN)
5025 return emit_call_insn (x);
5026 else
5027 abort ();
5030 /* Space for free sequence stack entries. */
5031 static GTY ((deletable (""))) struct sequence_stack *free_sequence_stack;
5033 /* Begin emitting insns to a sequence which can be packaged in an
5034 RTL_EXPR. If this sequence will contain something that might cause
5035 the compiler to pop arguments to function calls (because those
5036 pops have previously been deferred; see INHIBIT_DEFER_POP for more
5037 details), use do_pending_stack_adjust before calling this function.
5038 That will ensure that the deferred pops are not accidentally
5039 emitted in the middle of this sequence. */
5041 void
5042 start_sequence ()
5044 struct sequence_stack *tem;
5046 if (free_sequence_stack != NULL)
5048 tem = free_sequence_stack;
5049 free_sequence_stack = tem->next;
5051 else
5052 tem = (struct sequence_stack *) ggc_alloc (sizeof (struct sequence_stack));
5054 tem->next = seq_stack;
5055 tem->first = first_insn;
5056 tem->last = last_insn;
5057 tem->sequence_rtl_expr = seq_rtl_expr;
5059 seq_stack = tem;
5061 first_insn = 0;
5062 last_insn = 0;
5065 /* Similarly, but indicate that this sequence will be placed in T, an
5066 RTL_EXPR. See the documentation for start_sequence for more
5067 information about how to use this function. */
5069 void
5070 start_sequence_for_rtl_expr (t)
5071 tree t;
5073 start_sequence ();
5075 seq_rtl_expr = t;
5078 /* Set up the insn chain starting with FIRST as the current sequence,
5079 saving the previously current one. See the documentation for
5080 start_sequence for more information about how to use this function. */
5082 void
5083 push_to_sequence (first)
5084 rtx first;
5086 rtx last;
5088 start_sequence ();
5090 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last));
5092 first_insn = first;
5093 last_insn = last;
5096 /* Set up the insn chain from a chain stort in FIRST to LAST. */
5098 void
5099 push_to_full_sequence (first, last)
5100 rtx first, last;
5102 start_sequence ();
5103 first_insn = first;
5104 last_insn = last;
5105 /* We really should have the end of the insn chain here. */
5106 if (last && NEXT_INSN (last))
5107 abort ();
5110 /* Set up the outer-level insn chain
5111 as the current sequence, saving the previously current one. */
5113 void
5114 push_topmost_sequence ()
5116 struct sequence_stack *stack, *top = NULL;
5118 start_sequence ();
5120 for (stack = seq_stack; stack; stack = stack->next)
5121 top = stack;
5123 first_insn = top->first;
5124 last_insn = top->last;
5125 seq_rtl_expr = top->sequence_rtl_expr;
5128 /* After emitting to the outer-level insn chain, update the outer-level
5129 insn chain, and restore the previous saved state. */
5131 void
5132 pop_topmost_sequence ()
5134 struct sequence_stack *stack, *top = NULL;
5136 for (stack = seq_stack; stack; stack = stack->next)
5137 top = stack;
5139 top->first = first_insn;
5140 top->last = last_insn;
5141 /* ??? Why don't we save seq_rtl_expr here? */
5143 end_sequence ();
5146 /* After emitting to a sequence, restore previous saved state.
5148 To get the contents of the sequence just made, you must call
5149 `get_insns' *before* calling here.
5151 If the compiler might have deferred popping arguments while
5152 generating this sequence, and this sequence will not be immediately
5153 inserted into the instruction stream, use do_pending_stack_adjust
5154 before calling get_insns. That will ensure that the deferred
5155 pops are inserted into this sequence, and not into some random
5156 location in the instruction stream. See INHIBIT_DEFER_POP for more
5157 information about deferred popping of arguments. */
5159 void
5160 end_sequence ()
5162 struct sequence_stack *tem = seq_stack;
5164 first_insn = tem->first;
5165 last_insn = tem->last;
5166 seq_rtl_expr = tem->sequence_rtl_expr;
5167 seq_stack = tem->next;
5169 memset (tem, 0, sizeof (*tem));
5170 tem->next = free_sequence_stack;
5171 free_sequence_stack = tem;
5174 /* This works like end_sequence, but records the old sequence in FIRST
5175 and LAST. */
5177 void
5178 end_full_sequence (first, last)
5179 rtx *first, *last;
5181 *first = first_insn;
5182 *last = last_insn;
5183 end_sequence ();
5186 /* Return 1 if currently emitting into a sequence. */
5189 in_sequence_p ()
5191 return seq_stack != 0;
5194 /* Put the various virtual registers into REGNO_REG_RTX. */
5196 void
5197 init_virtual_regs (es)
5198 struct emit_status *es;
5200 rtx *ptr = es->x_regno_reg_rtx;
5201 ptr[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
5202 ptr[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
5203 ptr[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
5204 ptr[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
5205 ptr[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
5209 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
5210 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
5211 static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
5212 static int copy_insn_n_scratches;
5214 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5215 copied an ASM_OPERANDS.
5216 In that case, it is the original input-operand vector. */
5217 static rtvec orig_asm_operands_vector;
5219 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5220 copied an ASM_OPERANDS.
5221 In that case, it is the copied input-operand vector. */
5222 static rtvec copy_asm_operands_vector;
5224 /* Likewise for the constraints vector. */
5225 static rtvec orig_asm_constraints_vector;
5226 static rtvec copy_asm_constraints_vector;
5228 /* Recursively create a new copy of an rtx for copy_insn.
5229 This function differs from copy_rtx in that it handles SCRATCHes and
5230 ASM_OPERANDs properly.
5231 Normally, this function is not used directly; use copy_insn as front end.
5232 However, you could first copy an insn pattern with copy_insn and then use
5233 this function afterwards to properly copy any REG_NOTEs containing
5234 SCRATCHes. */
5237 copy_insn_1 (orig)
5238 rtx orig;
5240 rtx copy;
5241 int i, j;
5242 RTX_CODE code;
5243 const char *format_ptr;
5245 code = GET_CODE (orig);
5247 switch (code)
5249 case REG:
5250 case QUEUED:
5251 case CONST_INT:
5252 case CONST_DOUBLE:
5253 case CONST_VECTOR:
5254 case SYMBOL_REF:
5255 case CODE_LABEL:
5256 case PC:
5257 case CC0:
5258 case ADDRESSOF:
5259 return orig;
5261 case SCRATCH:
5262 for (i = 0; i < copy_insn_n_scratches; i++)
5263 if (copy_insn_scratch_in[i] == orig)
5264 return copy_insn_scratch_out[i];
5265 break;
5267 case CONST:
5268 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
5269 a LABEL_REF, it isn't sharable. */
5270 if (GET_CODE (XEXP (orig, 0)) == PLUS
5271 && GET_CODE (XEXP (XEXP (orig, 0), 0)) == SYMBOL_REF
5272 && GET_CODE (XEXP (XEXP (orig, 0), 1)) == CONST_INT)
5273 return orig;
5274 break;
5276 /* A MEM with a constant address is not sharable. The problem is that
5277 the constant address may need to be reloaded. If the mem is shared,
5278 then reloading one copy of this mem will cause all copies to appear
5279 to have been reloaded. */
5281 default:
5282 break;
5285 copy = rtx_alloc (code);
5287 /* Copy the various flags, and other information. We assume that
5288 all fields need copying, and then clear the fields that should
5289 not be copied. That is the sensible default behavior, and forces
5290 us to explicitly document why we are *not* copying a flag. */
5291 memcpy (copy, orig, sizeof (struct rtx_def) - sizeof (rtunion));
5293 /* We do not copy the USED flag, which is used as a mark bit during
5294 walks over the RTL. */
5295 RTX_FLAG (copy, used) = 0;
5297 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
5298 if (GET_RTX_CLASS (code) == 'i')
5300 RTX_FLAG (copy, jump) = 0;
5301 RTX_FLAG (copy, call) = 0;
5302 RTX_FLAG (copy, frame_related) = 0;
5305 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
5307 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
5309 copy->fld[i] = orig->fld[i];
5310 switch (*format_ptr++)
5312 case 'e':
5313 if (XEXP (orig, i) != NULL)
5314 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
5315 break;
5317 case 'E':
5318 case 'V':
5319 if (XVEC (orig, i) == orig_asm_constraints_vector)
5320 XVEC (copy, i) = copy_asm_constraints_vector;
5321 else if (XVEC (orig, i) == orig_asm_operands_vector)
5322 XVEC (copy, i) = copy_asm_operands_vector;
5323 else if (XVEC (orig, i) != NULL)
5325 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
5326 for (j = 0; j < XVECLEN (copy, i); j++)
5327 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
5329 break;
5331 case 't':
5332 case 'w':
5333 case 'i':
5334 case 's':
5335 case 'S':
5336 case 'u':
5337 case '0':
5338 /* These are left unchanged. */
5339 break;
5341 default:
5342 abort ();
5346 if (code == SCRATCH)
5348 i = copy_insn_n_scratches++;
5349 if (i >= MAX_RECOG_OPERANDS)
5350 abort ();
5351 copy_insn_scratch_in[i] = orig;
5352 copy_insn_scratch_out[i] = copy;
5354 else if (code == ASM_OPERANDS)
5356 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
5357 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
5358 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
5359 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
5362 return copy;
5365 /* Create a new copy of an rtx.
5366 This function differs from copy_rtx in that it handles SCRATCHes and
5367 ASM_OPERANDs properly.
5368 INSN doesn't really have to be a full INSN; it could be just the
5369 pattern. */
5371 copy_insn (insn)
5372 rtx insn;
5374 copy_insn_n_scratches = 0;
5375 orig_asm_operands_vector = 0;
5376 orig_asm_constraints_vector = 0;
5377 copy_asm_operands_vector = 0;
5378 copy_asm_constraints_vector = 0;
5379 return copy_insn_1 (insn);
5382 /* Initialize data structures and variables in this file
5383 before generating rtl for each function. */
5385 void
5386 init_emit ()
5388 struct function *f = cfun;
5390 f->emit = (struct emit_status *) ggc_alloc (sizeof (struct emit_status));
5391 first_insn = NULL;
5392 last_insn = NULL;
5393 seq_rtl_expr = NULL;
5394 cur_insn_uid = 1;
5395 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
5396 last_linenum = 0;
5397 last_filename = 0;
5398 first_label_num = label_num;
5399 last_label_num = 0;
5400 seq_stack = NULL;
5402 /* Init the tables that describe all the pseudo regs. */
5404 f->emit->regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
5406 f->emit->regno_pointer_align
5407 = (unsigned char *) ggc_alloc_cleared (f->emit->regno_pointer_align_length
5408 * sizeof (unsigned char));
5410 regno_reg_rtx
5411 = (rtx *) ggc_alloc (f->emit->regno_pointer_align_length * sizeof (rtx));
5413 /* Put copies of all the hard registers into regno_reg_rtx. */
5414 memcpy (regno_reg_rtx,
5415 static_regno_reg_rtx,
5416 FIRST_PSEUDO_REGISTER * sizeof (rtx));
5418 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
5419 init_virtual_regs (f->emit);
5421 /* Indicate that the virtual registers and stack locations are
5422 all pointers. */
5423 REG_POINTER (stack_pointer_rtx) = 1;
5424 REG_POINTER (frame_pointer_rtx) = 1;
5425 REG_POINTER (hard_frame_pointer_rtx) = 1;
5426 REG_POINTER (arg_pointer_rtx) = 1;
5428 REG_POINTER (virtual_incoming_args_rtx) = 1;
5429 REG_POINTER (virtual_stack_vars_rtx) = 1;
5430 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5431 REG_POINTER (virtual_outgoing_args_rtx) = 1;
5432 REG_POINTER (virtual_cfa_rtx) = 1;
5434 #ifdef STACK_BOUNDARY
5435 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5436 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5437 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5438 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5440 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5441 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5442 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5443 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5444 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
5445 #endif
5447 #ifdef INIT_EXPANDERS
5448 INIT_EXPANDERS;
5449 #endif
5452 /* Generate the constant 0. */
5454 static rtx
5455 gen_const_vector_0 (mode)
5456 enum machine_mode mode;
5458 rtx tem;
5459 rtvec v;
5460 int units, i;
5461 enum machine_mode inner;
5463 units = GET_MODE_NUNITS (mode);
5464 inner = GET_MODE_INNER (mode);
5466 v = rtvec_alloc (units);
5468 /* We need to call this function after we to set CONST0_RTX first. */
5469 if (!CONST0_RTX (inner))
5470 abort ();
5472 for (i = 0; i < units; ++i)
5473 RTVEC_ELT (v, i) = CONST0_RTX (inner);
5475 tem = gen_rtx_raw_CONST_VECTOR (mode, v);
5476 return tem;
5479 /* Generate a vector like gen_rtx_raw_CONST_VEC, but use the zero vector when
5480 all elements are zero. */
5482 gen_rtx_CONST_VECTOR (mode, v)
5483 enum machine_mode mode;
5484 rtvec v;
5486 rtx inner_zero = CONST0_RTX (GET_MODE_INNER (mode));
5487 int i;
5489 for (i = GET_MODE_NUNITS (mode) - 1; i >= 0; i--)
5490 if (RTVEC_ELT (v, i) != inner_zero)
5491 return gen_rtx_raw_CONST_VECTOR (mode, v);
5492 return CONST0_RTX (mode);
5495 /* Create some permanent unique rtl objects shared between all functions.
5496 LINE_NUMBERS is nonzero if line numbers are to be generated. */
5498 void
5499 init_emit_once (line_numbers)
5500 int line_numbers;
5502 int i;
5503 enum machine_mode mode;
5504 enum machine_mode double_mode;
5506 /* Initialize the CONST_INT, CONST_DOUBLE, and memory attribute hash
5507 tables. */
5508 const_int_htab = htab_create_ggc (37, const_int_htab_hash,
5509 const_int_htab_eq, NULL);
5511 const_double_htab = htab_create_ggc (37, const_double_htab_hash,
5512 const_double_htab_eq, NULL);
5514 mem_attrs_htab = htab_create_ggc (37, mem_attrs_htab_hash,
5515 mem_attrs_htab_eq, NULL);
5516 reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash,
5517 reg_attrs_htab_eq, NULL);
5519 no_line_numbers = ! line_numbers;
5521 /* Compute the word and byte modes. */
5523 byte_mode = VOIDmode;
5524 word_mode = VOIDmode;
5525 double_mode = VOIDmode;
5527 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
5528 mode = GET_MODE_WIDER_MODE (mode))
5530 if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
5531 && byte_mode == VOIDmode)
5532 byte_mode = mode;
5534 if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
5535 && word_mode == VOIDmode)
5536 word_mode = mode;
5539 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
5540 mode = GET_MODE_WIDER_MODE (mode))
5542 if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
5543 && double_mode == VOIDmode)
5544 double_mode = mode;
5547 ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
5549 /* Assign register numbers to the globally defined register rtx.
5550 This must be done at runtime because the register number field
5551 is in a union and some compilers can't initialize unions. */
5553 pc_rtx = gen_rtx (PC, VOIDmode);
5554 cc0_rtx = gen_rtx (CC0, VOIDmode);
5555 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
5556 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
5557 if (hard_frame_pointer_rtx == 0)
5558 hard_frame_pointer_rtx = gen_raw_REG (Pmode,
5559 HARD_FRAME_POINTER_REGNUM);
5560 if (arg_pointer_rtx == 0)
5561 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
5562 virtual_incoming_args_rtx =
5563 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
5564 virtual_stack_vars_rtx =
5565 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
5566 virtual_stack_dynamic_rtx =
5567 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
5568 virtual_outgoing_args_rtx =
5569 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
5570 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
5572 /* Initialize RTL for commonly used hard registers. These are
5573 copied into regno_reg_rtx as we begin to compile each function. */
5574 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5575 static_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
5577 #ifdef INIT_EXPANDERS
5578 /* This is to initialize {init|mark|free}_machine_status before the first
5579 call to push_function_context_to. This is needed by the Chill front
5580 end which calls push_function_context_to before the first call to
5581 init_function_start. */
5582 INIT_EXPANDERS;
5583 #endif
5585 /* Create the unique rtx's for certain rtx codes and operand values. */
5587 /* Don't use gen_rtx here since gen_rtx in this case
5588 tries to use these variables. */
5589 for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
5590 const_int_rtx[i + MAX_SAVED_CONST_INT] =
5591 gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);
5593 if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
5594 && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
5595 const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
5596 else
5597 const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
5599 REAL_VALUE_FROM_INT (dconst0, 0, 0, double_mode);
5600 REAL_VALUE_FROM_INT (dconst1, 1, 0, double_mode);
5601 REAL_VALUE_FROM_INT (dconst2, 2, 0, double_mode);
5602 REAL_VALUE_FROM_INT (dconstm1, -1, -1, double_mode);
5603 REAL_VALUE_FROM_INT (dconstm2, -2, -1, double_mode);
5605 dconsthalf = dconst1;
5606 dconsthalf.exp--;
5608 for (i = 0; i <= 2; i++)
5610 REAL_VALUE_TYPE *r =
5611 (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
5613 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
5614 mode = GET_MODE_WIDER_MODE (mode))
5615 const_tiny_rtx[i][(int) mode] =
5616 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5618 const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
5620 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
5621 mode = GET_MODE_WIDER_MODE (mode))
5622 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5624 for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT);
5625 mode != VOIDmode;
5626 mode = GET_MODE_WIDER_MODE (mode))
5627 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5630 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
5631 mode != VOIDmode;
5632 mode = GET_MODE_WIDER_MODE (mode))
5633 const_tiny_rtx[0][(int) mode] = gen_const_vector_0 (mode);
5635 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
5636 mode != VOIDmode;
5637 mode = GET_MODE_WIDER_MODE (mode))
5638 const_tiny_rtx[0][(int) mode] = gen_const_vector_0 (mode);
5640 for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
5641 if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
5642 const_tiny_rtx[0][i] = const0_rtx;
5644 const_tiny_rtx[0][(int) BImode] = const0_rtx;
5645 if (STORE_FLAG_VALUE == 1)
5646 const_tiny_rtx[1][(int) BImode] = const1_rtx;
5648 #ifdef RETURN_ADDRESS_POINTER_REGNUM
5649 return_address_pointer_rtx
5650 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
5651 #endif
5653 #ifdef STRUCT_VALUE
5654 struct_value_rtx = STRUCT_VALUE;
5655 #else
5656 struct_value_rtx = gen_rtx_REG (Pmode, STRUCT_VALUE_REGNUM);
5657 #endif
5659 #ifdef STRUCT_VALUE_INCOMING
5660 struct_value_incoming_rtx = STRUCT_VALUE_INCOMING;
5661 #else
5662 #ifdef STRUCT_VALUE_INCOMING_REGNUM
5663 struct_value_incoming_rtx
5664 = gen_rtx_REG (Pmode, STRUCT_VALUE_INCOMING_REGNUM);
5665 #else
5666 struct_value_incoming_rtx = struct_value_rtx;
5667 #endif
5668 #endif
5670 #ifdef STATIC_CHAIN_REGNUM
5671 static_chain_rtx = gen_rtx_REG (Pmode, STATIC_CHAIN_REGNUM);
5673 #ifdef STATIC_CHAIN_INCOMING_REGNUM
5674 if (STATIC_CHAIN_INCOMING_REGNUM != STATIC_CHAIN_REGNUM)
5675 static_chain_incoming_rtx
5676 = gen_rtx_REG (Pmode, STATIC_CHAIN_INCOMING_REGNUM);
5677 else
5678 #endif
5679 static_chain_incoming_rtx = static_chain_rtx;
5680 #endif
5682 #ifdef STATIC_CHAIN
5683 static_chain_rtx = STATIC_CHAIN;
5685 #ifdef STATIC_CHAIN_INCOMING
5686 static_chain_incoming_rtx = STATIC_CHAIN_INCOMING;
5687 #else
5688 static_chain_incoming_rtx = static_chain_rtx;
5689 #endif
5690 #endif
5692 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
5693 pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
5696 /* Query and clear/ restore no_line_numbers. This is used by the
5697 switch / case handling in stmt.c to give proper line numbers in
5698 warnings about unreachable code. */
5701 force_line_numbers ()
5703 int old = no_line_numbers;
5705 no_line_numbers = 0;
5706 if (old)
5707 force_next_line_note ();
5708 return old;
5711 void
5712 restore_line_number_status (old_value)
5713 int old_value;
5715 no_line_numbers = old_value;
5718 /* Produce exact duplicate of insn INSN after AFTER.
5719 Care updating of libcall regions if present. */
5722 emit_copy_of_insn_after (insn, after)
5723 rtx insn, after;
5725 rtx new;
5726 rtx note1, note2, link;
5728 switch (GET_CODE (insn))
5730 case INSN:
5731 new = emit_insn_after (copy_insn (PATTERN (insn)), after);
5732 break;
5734 case JUMP_INSN:
5735 new = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
5736 break;
5738 case CALL_INSN:
5739 new = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
5740 if (CALL_INSN_FUNCTION_USAGE (insn))
5741 CALL_INSN_FUNCTION_USAGE (new)
5742 = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
5743 SIBLING_CALL_P (new) = SIBLING_CALL_P (insn);
5744 CONST_OR_PURE_CALL_P (new) = CONST_OR_PURE_CALL_P (insn);
5745 break;
5747 default:
5748 abort ();
5751 /* Update LABEL_NUSES. */
5752 mark_jump_label (PATTERN (new), new, 0);
5754 INSN_SCOPE (new) = INSN_SCOPE (insn);
5756 /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
5757 make them. */
5758 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
5759 if (REG_NOTE_KIND (link) != REG_LABEL)
5761 if (GET_CODE (link) == EXPR_LIST)
5762 REG_NOTES (new)
5763 = copy_insn_1 (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link),
5764 XEXP (link, 0),
5765 REG_NOTES (new)));
5766 else
5767 REG_NOTES (new)
5768 = copy_insn_1 (gen_rtx_INSN_LIST (REG_NOTE_KIND (link),
5769 XEXP (link, 0),
5770 REG_NOTES (new)));
5773 /* Fix the libcall sequences. */
5774 if ((note1 = find_reg_note (new, REG_RETVAL, NULL_RTX)) != NULL)
5776 rtx p = new;
5777 while ((note2 = find_reg_note (p, REG_LIBCALL, NULL_RTX)) == NULL)
5778 p = PREV_INSN (p);
5779 XEXP (note1, 0) = p;
5780 XEXP (note2, 0) = new;
5782 INSN_CODE (new) = INSN_CODE (insn);
5783 return new;
5786 #include "gt-emit-rtl.h"