/* Emit RTL for the GNU C-Compiler expander.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* Middle-to-low level generation of rtx code and insns.

   This file contains the functions `gen_rtx', `gen_reg_rtx'
   and `gen_label_rtx' that are the usual ways of creating rtl
   expressions for most purposes.

   It also has the functions for creating insns and linking
   them in the doubly-linked chain.

   The patterns of the insns are created by machine-dependent
   routines in insn-emit.c, which is generated automatically from
   the machine description.  These routines use `gen_rtx' to make
   the individual rtx's of the pattern; what is machine dependent
   is the kind of rtx's they make and what arguments they use.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "hashtab.h"
#include "insn-config.h"
#include "recog.h"
#include "real.h"
#include "bitmap.h"
#include "basic-block.h"
#include "ggc.h"
#include "debug.h"
#include "langhooks.h"

/* Commonly used modes.  */

enum machine_mode byte_mode;	/* Mode whose width is BITS_PER_UNIT.  */
enum machine_mode word_mode;	/* Mode whose width is BITS_PER_WORD.  */
enum machine_mode double_mode;	/* Mode whose width is DOUBLE_TYPE_SIZE.  */
enum machine_mode ptr_mode;	/* Mode whose width is POINTER_SIZE.  */

/* This is *not* reset after each function.  It gives each CODE_LABEL
   in the entire compilation a unique label number.  */

static GTY(()) int label_num = 1;

/* Highest label number in current function.
   Zero means use the value of label_num instead.
   This is nonzero only when belatedly compiling an inline function.  */

static int last_label_num;

/* Value label_num had when set_new_first_and_last_label_number was called.
   If label_num has not changed since then, last_label_num is valid.  */

static int base_label_num;

/* Nonzero means do not generate NOTEs for source line numbers.  */

static int no_line_numbers;

/* Commonly used rtx's, so that we only need space for one copy.
   These are initialized once for the entire compilation.
   All of these are unique; no other rtx-object will be equal to any
   of these.  */

rtx global_rtl[GR_MAX];

/* Commonly used RTL for hard registers.  These objects are not necessarily
   unique, so we allocate them separately from global_rtl.  They are
   initialized once per compilation unit, then copied into regno_reg_rtx
   at the beginning of each function.  */

static GTY(()) rtx static_regno_reg_rtx[FIRST_PSEUDO_REGISTER];

/* We record floating-point CONST_DOUBLEs in each floating-point mode for
   the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
   record a copy of const[012]_rtx.  */

rtx const_tiny_rtx[3][(int) MAX_MACHINE_MODE];

rtx const_true_rtx;

REAL_VALUE_TYPE dconst0;
REAL_VALUE_TYPE dconst1;
REAL_VALUE_TYPE dconst2;
REAL_VALUE_TYPE dconstm1;
REAL_VALUE_TYPE dconstm2;
REAL_VALUE_TYPE dconsthalf;

/* All references to the following fixed hard registers go through
   these unique rtl objects.  On machines where the frame-pointer and
   arg-pointer are the same register, they use the same unique object.

   After register allocation, other rtl objects which used to be pseudo-regs
   may be clobbered to refer to the frame-pointer register.
   But references that were originally to the frame-pointer can be
   distinguished from the others because they contain frame_pointer_rtx.

   When to use frame_pointer_rtx and hard_frame_pointer_rtx is a little
   tricky: until register elimination has taken place hard_frame_pointer_rtx
   should be used if it is being set, and frame_pointer_rtx otherwise.  After
   register elimination hard_frame_pointer_rtx should always be used.
   On machines where the two registers are the same (as on most machines),
   these are of course the same rtx.

   In an inline procedure, the stack and frame pointer rtxs may not be
   used for anything else.  */
rtx struct_value_rtx;		/* (REG:Pmode STRUCT_VALUE_REGNUM) */
rtx struct_value_incoming_rtx;	/* (REG:Pmode STRUCT_VALUE_INCOMING_REGNUM) */
rtx static_chain_rtx;		/* (REG:Pmode STATIC_CHAIN_REGNUM) */
rtx static_chain_incoming_rtx;	/* (REG:Pmode STATIC_CHAIN_INCOMING_REGNUM) */
rtx pic_offset_table_rtx;	/* (REG:Pmode PIC_OFFSET_TABLE_REGNUM) */

/* This is used to implement __builtin_return_address for some machines.
   See for instance the MIPS port.  */
rtx return_address_pointer_rtx;	/* (REG:Pmode RETURN_ADDRESS_POINTER_REGNUM) */

/* We make one copy of (const_int C) where C is in
   [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
   to save space during the compilation and simplify comparisons of
   integers.  */

rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];

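/* A minimal sketch for illustration, kept out of the build with #if 0
   like the other disabled fragments in this file: because of the
   sharing above, any two small CONST_INTs with the same value are the
   same object, so pointer comparison suffices.  `is_const_two' is a
   hypothetical helper, not part of this file.  */
#if 0
static int
is_const_two (x)
     rtx x;
{
  /* GEN_INT (2) always yields const_int_rtx[MAX_SAVED_CONST_INT + 2],
     which is exactly const2_rtx, so == is a valid equality test.  */
  return x == const2_rtx;
}
#endif
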
/* A hash table storing CONST_INTs whose absolute value is greater
   than MAX_SAVED_CONST_INT.  */

static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_int_htab;

/* A hash table storing memory attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct mem_attrs)))
     htab_t mem_attrs_htab;

/* A hash table storing register attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs)))
     htab_t reg_attrs_htab;

/* A hash table storing all CONST_DOUBLEs.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_double_htab;

#define first_insn (cfun->emit->x_first_insn)
#define last_insn (cfun->emit->x_last_insn)
#define cur_insn_uid (cfun->emit->x_cur_insn_uid)
#define last_linenum (cfun->emit->x_last_linenum)
#define last_filename (cfun->emit->x_last_filename)
#define first_label_num (cfun->emit->x_first_label_num)

static rtx make_jump_insn_raw		PARAMS ((rtx));
static rtx make_call_insn_raw		PARAMS ((rtx));
static rtx find_line_note		PARAMS ((rtx));
static rtx change_address_1		PARAMS ((rtx, enum machine_mode, rtx,
						 int));
static void unshare_all_rtl_1		PARAMS ((rtx));
static void unshare_all_decls		PARAMS ((tree));
static void reset_used_decls		PARAMS ((tree));
static void mark_label_nuses		PARAMS ((rtx));
static hashval_t const_int_htab_hash	PARAMS ((const void *));
static int const_int_htab_eq		PARAMS ((const void *,
						 const void *));
static hashval_t const_double_htab_hash PARAMS ((const void *));
static int const_double_htab_eq		PARAMS ((const void *,
						 const void *));
static rtx lookup_const_double		PARAMS ((rtx));
static hashval_t mem_attrs_htab_hash	PARAMS ((const void *));
static int mem_attrs_htab_eq		PARAMS ((const void *,
						 const void *));
static mem_attrs *get_mem_attrs		PARAMS ((HOST_WIDE_INT, tree, rtx,
						 rtx, unsigned int,
						 enum machine_mode));
static hashval_t reg_attrs_htab_hash	PARAMS ((const void *));
static int reg_attrs_htab_eq		PARAMS ((const void *,
						 const void *));
static reg_attrs *get_reg_attrs		PARAMS ((tree, int));
static tree component_ref_for_mem_expr	PARAMS ((tree));
static rtx gen_const_vector_0		PARAMS ((enum machine_mode));

/* Probability of the conditional branch currently being processed by
   try_split.  Set to -1 otherwise.  */
int split_branch_probability = -1;

/* Returns a hash code for X (which is really a CONST_INT).  */

static hashval_t
const_int_htab_hash (x)
     const void *x;
{
  return (hashval_t) INTVAL ((struct rtx_def *) x);
}

/* Returns nonzero if the value represented by X (which is really a
   CONST_INT) is the same as that given by Y (which is really a
   HOST_WIDE_INT *).  */

static int
const_int_htab_eq (x, y)
     const void *x;
     const void *y;
{
  return (INTVAL ((rtx) x) == *((const HOST_WIDE_INT *) y));
}

/* Returns a hash code for X (which is really a CONST_DOUBLE).  */
static hashval_t
const_double_htab_hash (x)
     const void *x;
{
  rtx value = (rtx) x;
  hashval_t h;

  if (GET_MODE (value) == VOIDmode)
    h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
  else
    {
      h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
      /* MODE is used in the comparison, so it should be in the hash.  */
      h ^= GET_MODE (value);
    }
  return h;
}

/* Returns nonzero if the value represented by X (really a CONST_DOUBLE)
   is the same as that represented by Y (also really a CONST_DOUBLE).  */
static int
const_double_htab_eq (x, y)
     const void *x;
     const void *y;
{
  rtx a = (rtx) x, b = (rtx) y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  if (GET_MODE (a) == VOIDmode)
    return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
	    && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
  else
    return real_identical (CONST_DOUBLE_REAL_VALUE (a),
			   CONST_DOUBLE_REAL_VALUE (b));
}

/* Returns a hash code for X (which is really a mem_attrs *).  */

static hashval_t
mem_attrs_htab_hash (x)
     const void *x;
{
  mem_attrs *p = (mem_attrs *) x;

  return (p->alias ^ (p->align * 1000)
	  ^ ((p->offset ? INTVAL (p->offset) : 0) * 50000)
	  ^ ((p->size ? INTVAL (p->size) : 0) * 2500000)
	  ^ (size_t) p->expr);
}

/* Returns nonzero if the value represented by X (which is really a
   mem_attrs *) is the same as that given by Y (which is also really a
   mem_attrs *).  */

static int
mem_attrs_htab_eq (x, y)
     const void *x;
     const void *y;
{
  mem_attrs *p = (mem_attrs *) x;
  mem_attrs *q = (mem_attrs *) y;

  return (p->alias == q->alias && p->expr == q->expr && p->offset == q->offset
	  && p->size == q->size && p->align == q->align);
}

/* Allocate a new mem_attrs structure and insert it into the hash table if
   one identical to it is not already in the table.  We are doing this for
   MEM of mode MODE.  */

static mem_attrs *
get_mem_attrs (alias, expr, offset, size, align, mode)
     HOST_WIDE_INT alias;
     tree expr;
     rtx offset;
     rtx size;
     unsigned int align;
     enum machine_mode mode;
{
  mem_attrs attrs;
  void **slot;

  /* If everything is the default, we can just return zero.
     This must match what the corresponding MEM_* macros return when the
     field is not present.  */
  if (alias == 0 && expr == 0 && offset == 0
      && (size == 0
	  || (mode != BLKmode && GET_MODE_SIZE (mode) == INTVAL (size)))
      && (STRICT_ALIGNMENT && mode != BLKmode
	  ? align == GET_MODE_ALIGNMENT (mode) : align == BITS_PER_UNIT))
    return 0;

  attrs.alias = alias;
  attrs.expr = expr;
  attrs.offset = offset;
  attrs.size = size;
  attrs.align = align;

  slot = htab_find_slot (mem_attrs_htab, &attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc (sizeof (mem_attrs));
      memcpy (*slot, &attrs, sizeof (mem_attrs));
    }

  return *slot;
}

/* Returns a hash code for X (which is really a reg_attrs *).  */

static hashval_t
reg_attrs_htab_hash (x)
     const void *x;
{
  reg_attrs *p = (reg_attrs *) x;

  return ((p->offset * 1000) ^ (long) p->decl);
}

/* Returns nonzero if the value represented by X (which is really a
   reg_attrs *) is the same as that given by Y (which is also really a
   reg_attrs *).  */

static int
reg_attrs_htab_eq (x, y)
     const void *x;
     const void *y;
{
  reg_attrs *p = (reg_attrs *) x;
  reg_attrs *q = (reg_attrs *) y;

  return (p->decl == q->decl && p->offset == q->offset);
}

/* Allocate a new reg_attrs structure and insert it into the hash table if
   one identical to it is not already in the table.  We are doing this for
   a REG whose attributes are DECL and OFFSET.  */

static reg_attrs *
get_reg_attrs (decl, offset)
     tree decl;
     int offset;
{
  reg_attrs attrs;
  void **slot;

  /* If everything is the default, we can just return zero.  */
  if (decl == 0 && offset == 0)
    return 0;

  attrs.decl = decl;
  attrs.offset = offset;

  slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc (sizeof (reg_attrs));
      memcpy (*slot, &attrs, sizeof (reg_attrs));
    }

  return *slot;
}

/* Generate a new REG rtx.  Make sure ORIGINAL_REGNO is set properly, and
   don't attempt to share with the various global pieces of rtl (such as
   frame_pointer_rtx).  */

rtx
gen_raw_REG (mode, regno)
     enum machine_mode mode;
     int regno;
{
  rtx x = gen_rtx_raw_REG (mode, regno);
  ORIGINAL_REGNO (x) = regno;
  return x;
}

/* There are some RTL codes that require special attention; the generation
   functions do the raw handling.  If you add to this list, modify
   special_rtx in gengenrtl.c as well.  */

rtx
gen_rtx_CONST_INT (mode, arg)
     enum machine_mode mode ATTRIBUTE_UNUSED;
     HOST_WIDE_INT arg;
{
  void **slot;

  if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
    return const_int_rtx[arg + MAX_SAVED_CONST_INT];

#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
  if (const_true_rtx && arg == STORE_FLAG_VALUE)
    return const_true_rtx;
#endif

  /* Look up the CONST_INT in the hash table.  */
  slot = htab_find_slot_with_hash (const_int_htab, &arg,
				   (hashval_t) arg, INSERT);
  if (*slot == 0)
    *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);

  return (rtx) *slot;
}

rtx
gen_int_mode (c, mode)
     HOST_WIDE_INT c;
     enum machine_mode mode;
{
  return GEN_INT (trunc_int_for_mode (c, mode));
}

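/* A minimal illustrative sketch (disabled): gen_int_mode canonicalizes
   a constant to its mode, where plain GEN_INT would not.  */
#if 0
  rtx a = gen_int_mode (0xff, QImode);	/* (const_int -1), i.e. constm1_rtx */
  rtx b = gen_int_mode (0xff, HImode);	/* (const_int 255) */
  rtx c = GEN_INT (0xff);		/* (const_int 255); mode unchecked */
#endif
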
/* CONST_DOUBLEs might be created from pairs of integers, or from
   REAL_VALUE_TYPEs.  Also, their length is known only at run time,
   so we cannot use gen_rtx_raw_CONST_DOUBLE.  */

/* Determine whether REAL, a CONST_DOUBLE, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */
static rtx
lookup_const_double (real)
     rtx real;
{
  void **slot = htab_find_slot (const_double_htab, real, INSERT);
  if (*slot == 0)
    *slot = real;

  return (rtx) *slot;
}

/* Return a CONST_DOUBLE rtx for a floating-point value specified by
   VALUE in mode MODE.  */

rtx
const_double_from_real_value (value, mode)
     REAL_VALUE_TYPE value;
     enum machine_mode mode;
{
  rtx real = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (real, mode);

  memcpy (&CONST_DOUBLE_LOW (real), &value, sizeof (REAL_VALUE_TYPE));

  return lookup_const_double (real);
}

/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
   of ints: I0 is the low-order word and I1 is the high-order word.
   Do not use this routine for non-integer modes; convert to
   REAL_VALUE_TYPE and use CONST_DOUBLE_FROM_REAL_VALUE.  */

rtx
immed_double_const (i0, i1, mode)
     HOST_WIDE_INT i0, i1;
     enum machine_mode mode;
{
  rtx value;
  unsigned int i;

  if (mode != VOIDmode)
    {
      int width;
      if (GET_MODE_CLASS (mode) != MODE_INT
	  && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT
	  /* We can get a 0 for an error mark.  */
	  && GET_MODE_CLASS (mode) != MODE_VECTOR_INT
	  && GET_MODE_CLASS (mode) != MODE_VECTOR_FLOAT)
	abort ();

      /* We clear out all bits that don't belong in MODE, unless they and
	 our sign bit are all one.  So we get either a reasonable negative
	 value or a reasonable unsigned value for this mode.  */
      width = GET_MODE_BITSIZE (mode);
      if (width < HOST_BITS_PER_WIDE_INT
	  && ((i0 & ((HOST_WIDE_INT) (-1) << (width - 1)))
	      != ((HOST_WIDE_INT) (-1) << (width - 1))))
	i0 &= ((HOST_WIDE_INT) 1 << width) - 1, i1 = 0;
      else if (width == HOST_BITS_PER_WIDE_INT
	       && ! (i1 == ~0 && i0 < 0))
	i1 = 0;
      else if (width > 2 * HOST_BITS_PER_WIDE_INT)
	/* We cannot represent this value as a constant.  */
	abort ();

      /* If this would be an entire word for the target, but is not for
	 the host, then sign-extend on the host so that the number will
	 look the same way on the host that it would on the target.

	 For example, when building a 64 bit alpha hosted 32 bit sparc
	 targeted compiler, then we want the 32 bit unsigned value -1 to be
	 represented as a 64 bit value -1, and not as 0x00000000ffffffff.
	 The latter confuses the sparc backend.  */

      if (width < HOST_BITS_PER_WIDE_INT
	  && (i0 & ((HOST_WIDE_INT) 1 << (width - 1))))
	i0 |= ((HOST_WIDE_INT) (-1) << width);

      /* If MODE fits within HOST_BITS_PER_WIDE_INT, always use a
	 CONST_INT.

	 ??? Strictly speaking, this is wrong if we create a CONST_INT for
	 a large unsigned constant with the size of MODE being
	 HOST_BITS_PER_WIDE_INT and later try to interpret that constant
	 in a wider mode.  In that case we will mis-interpret it as a
	 negative number.

	 Unfortunately, the only alternative is to make a CONST_DOUBLE for
	 any constant in any mode if it is an unsigned constant larger
	 than the maximum signed integer in an int on the host.  However,
	 doing this will break everyone that always expects to see a
	 CONST_INT for SImode and smaller.

	 We have always been making CONST_INTs in this case, so nothing
	 new is being broken.  */

      if (width <= HOST_BITS_PER_WIDE_INT)
	i1 = (i0 < 0) ? ~(HOST_WIDE_INT) 0 : 0;
    }

  /* If this integer fits in one word, return a CONST_INT.  */
  if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
    return GEN_INT (i0);

  /* We use VOIDmode for integers.  */
  value = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (value, VOIDmode);

  CONST_DOUBLE_LOW (value) = i0;
  CONST_DOUBLE_HIGH (value) = i1;

  for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
    XWINT (value, i) = 0;

  return lookup_const_double (value);
}

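/* A minimal illustrative sketch (disabled), assuming a 32-bit
   HOST_WIDE_INT: the DImode value 2^32 does not fit in one host word,
   so a VOIDmode CONST_DOUBLE with low == 0 and high == 1 comes back.
   On a 64-bit host the same value would be passed entirely in I0 and
   come back as a plain CONST_INT.  */
#if 0
  rtx big = immed_double_const (0, 1, DImode);
#endif
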
rtx
gen_rtx_REG (mode, regno)
     enum machine_mode mode;
     unsigned int regno;
{
  /* In case the MD file explicitly references the frame pointer, have
     all such references point to the same frame pointer.  This is
     used during frame pointer elimination to distinguish the explicit
     references to these registers from pseudos that happened to be
     assigned to them.

     If we have eliminated the frame pointer or arg pointer, we will
     be using it as a normal register, for example as a spill
     register.  In such cases, we might be accessing it in a mode that
     is not Pmode and therefore cannot use the pre-allocated rtx.

     Also don't do this when we are making new REGs in reload, since
     we don't want to get confused with the real pointers.  */

  if (mode == Pmode && !reload_in_progress)
    {
      if (regno == FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return frame_pointer_rtx;
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
      if (regno == HARD_FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return hard_frame_pointer_rtx;
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && HARD_FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
      if (regno == ARG_POINTER_REGNUM)
	return arg_pointer_rtx;
#endif
#ifdef RETURN_ADDRESS_POINTER_REGNUM
      if (regno == RETURN_ADDRESS_POINTER_REGNUM)
	return return_address_pointer_rtx;
#endif
      if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	return pic_offset_table_rtx;
      if (regno == STACK_POINTER_REGNUM)
	return stack_pointer_rtx;
    }

#if 0
  /* If the per-function register table has been set up, try to re-use
     an existing entry in that table to avoid useless generation of RTL.

     This code is disabled for now until we can fix the various backends
     which depend on having non-shared hard registers in some cases.  Long
     term we want to re-enable this code as it can significantly cut down
     on the amount of useless RTL that gets generated.

     We'll also need to fix some code that runs after reload that wants to
     set ORIGINAL_REGNO.  */

  if (cfun
      && cfun->emit
      && regno_reg_rtx
      && regno < FIRST_PSEUDO_REGISTER
      && reg_raw_mode[regno] == mode)
    return regno_reg_rtx[regno];
#endif

  return gen_raw_REG (mode, regno);
}

rtx
gen_rtx_MEM (mode, addr)
     enum machine_mode mode;
     rtx addr;
{
  rtx rt = gen_rtx_raw_MEM (mode, addr);

  /* This field is not cleared by the mere allocation of the rtx, so
     we clear it here.  */
  MEM_ATTRS (rt) = 0;

  return rt;
}

rtx
gen_rtx_SUBREG (mode, reg, offset)
     enum machine_mode mode;
     rtx reg;
     int offset;
{
  /* This is the most common failure type.
     Catch it early so we can see who does it.  */
  if ((offset % GET_MODE_SIZE (mode)) != 0)
    abort ();

  /* This check isn't usable right now because combine will
     throw arbitrary crap like a CALL into a SUBREG in
     gen_lowpart_for_combine so we must just eat it.  */
#if 0
  /* Check for this too.  */
  if (offset >= GET_MODE_SIZE (GET_MODE (reg)))
    abort ();
#endif
  return gen_rtx_raw_SUBREG (mode, reg, offset);
}

/* Generate a SUBREG representing the least-significant part of REG if MODE
   is smaller than mode of REG, otherwise paradoxical SUBREG.  */

rtx
gen_lowpart_SUBREG (mode, reg)
     enum machine_mode mode;
     rtx reg;
{
  enum machine_mode inmode;

  inmode = GET_MODE (reg);
  if (inmode == VOIDmode)
    inmode = mode;
  return gen_rtx_SUBREG (mode, reg,
			 subreg_lowpart_offset (mode, inmode));
}

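/* A minimal illustrative sketch (disabled); `direg' is a hypothetical
   DImode REG.  On a little-endian target this yields
   (subreg:SI (reg:DI n) 0); on a big-endian target the byte offset of
   the low part is 4 instead.  */
#if 0
  rtx lowpart = gen_lowpart_SUBREG (SImode, direg);
#endif
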
/* rtx gen_rtx (code, mode, [element1, ..., elementn])
**
**	    This routine generates an RTX of the size specified by
**	<code>, which is an RTX code.  The RTX structure is initialized
**	from the arguments <element1> through <elementn>, which are
**	interpreted according to the specific RTX type's format.  The
**	special machine mode associated with the rtx (if any) is specified
**	in <mode>.
**
**	    gen_rtx can be invoked in a way which resembles the lisp-like
**	rtx it will generate.  For example, the following rtx structure:
**
**	      (plus:QI (mem:QI (reg:SI 1))
**		       (mem:QI (plus:SI (reg:SI 2) (reg:SI 3))))
**
**	    ...would be generated by the following C code:
**
**	    gen_rtx (PLUS, QImode,
**		     gen_rtx (MEM, QImode,
**			      gen_rtx (REG, SImode, 1)),
**		     gen_rtx (MEM, QImode,
**			      gen_rtx (PLUS, SImode,
**				       gen_rtx (REG, SImode, 2),
**				       gen_rtx (REG, SImode, 3)))),
*/

/*VARARGS2*/
rtx
gen_rtx VPARAMS ((enum rtx_code code, enum machine_mode mode, ...))
{
  int i;		/* Array indices...			*/
  const char *fmt;	/* Current rtx's format...		*/
  rtx rt_val;		/* RTX to return to caller...		*/

  VA_OPEN (p, mode);
  VA_FIXEDARG (p, enum rtx_code, code);
  VA_FIXEDARG (p, enum machine_mode, mode);

  switch (code)
    {
    case CONST_INT:
      rt_val = gen_rtx_CONST_INT (mode, va_arg (p, HOST_WIDE_INT));
      break;

    case CONST_DOUBLE:
      {
	HOST_WIDE_INT arg0 = va_arg (p, HOST_WIDE_INT);
	HOST_WIDE_INT arg1 = va_arg (p, HOST_WIDE_INT);

	rt_val = immed_double_const (arg0, arg1, mode);
      }
      break;

    case REG:
      rt_val = gen_rtx_REG (mode, va_arg (p, int));
      break;

    case MEM:
      rt_val = gen_rtx_MEM (mode, va_arg (p, rtx));
      break;

    default:
      rt_val = rtx_alloc (code);	/* Allocate the storage space.  */
      rt_val->mode = mode;		/* Store the machine mode...  */

      fmt = GET_RTX_FORMAT (code);	/* Find the right format...  */
      for (i = 0; i < GET_RTX_LENGTH (code); i++)
	{
	  switch (*fmt++)
	    {
	    case '0':		/* Field with unknown use.  Zero it.  */
	      X0EXP (rt_val, i) = NULL_RTX;
	      break;

	    case 'i':		/* An integer?  */
	      XINT (rt_val, i) = va_arg (p, int);
	      break;

	    case 'w':		/* A wide integer?  */
	      XWINT (rt_val, i) = va_arg (p, HOST_WIDE_INT);
	      break;

	    case 's':		/* A string?  */
	      XSTR (rt_val, i) = va_arg (p, char *);
	      break;

	    case 'e':		/* An expression?  */
	    case 'u':		/* An insn?  Same except when printing.  */
	      XEXP (rt_val, i) = va_arg (p, rtx);
	      break;

	    case 'E':		/* An RTX vector?  */
	      XVEC (rt_val, i) = va_arg (p, rtvec);
	      break;

	    case 'b':		/* A bitmap?  */
	      XBITMAP (rt_val, i) = va_arg (p, bitmap);
	      break;

	    case 't':		/* A tree?  */
	      XTREE (rt_val, i) = va_arg (p, tree);
	      break;

	    default:
	      abort ();
	    }
	}
      break;
    }

  VA_CLOSE (p);
  return rt_val;
}

/* gen_rtvec (n, [rt1, ..., rtn])
**
**	    This routine creates an rtvec and stores within it the
**	pointers to rtx's which are its arguments.
*/

/*VARARGS1*/
rtvec
gen_rtvec VPARAMS ((int n, ...))
{
  int i, save_n;
  rtx *vector;

  VA_OPEN (p, n);
  VA_FIXEDARG (p, int, n);

  if (n == 0)
    return NULL_RTVEC;		/* Don't allocate an empty rtvec...	*/

  vector = (rtx *) alloca (n * sizeof (rtx));

  for (i = 0; i < n; i++)
    vector[i] = va_arg (p, rtx);

  /* The definition of VA_* in K&R C causes `n' to go out of scope.  */
  save_n = n;
  VA_CLOSE (p);

  return gen_rtvec_v (save_n, vector);
}

rtvec
gen_rtvec_v (n, argp)
     int n;
     rtx *argp;
{
  int i;
  rtvec rt_val;

  if (n == 0)
    return NULL_RTVEC;		/* Don't allocate an empty rtvec...	*/

  rt_val = rtvec_alloc (n);	/* Allocate an rtvec...			*/

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}

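/* A minimal illustrative sketch (disabled): building a two-element
   vector, e.g. for a PARALLEL; `pat0' and `pat1' are hypothetical
   rtx patterns.  */
#if 0
  rtvec v = gen_rtvec (2, pat0, pat1);
  rtx par = gen_rtx_PARALLEL (VOIDmode, v);
#endif
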
/* Generate a REG rtx for a new pseudo register of mode MODE.
   This pseudo is assigned the next sequential register number.  */

rtx
gen_reg_rtx (mode)
     enum machine_mode mode;
{
  struct function *f = cfun;
  rtx val;

  /* Don't let anything called after initial flow analysis create new
     registers.  */
  if (no_new_pseudos)
    abort ();

  if (generating_concat_p
      && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
    {
      /* For complex modes, don't make a single pseudo.
	 Instead, make a CONCAT of two pseudos.
	 This allows noncontiguous allocation of the real and imaginary parts,
	 which makes much better code.  Besides, allocating DCmode
	 pseudos overstrains reload on some machines like the 386.  */
      rtx realpart, imagpart;
      enum machine_mode partmode = GET_MODE_INNER (mode);

      realpart = gen_reg_rtx (partmode);
      imagpart = gen_reg_rtx (partmode);
      return gen_rtx_CONCAT (mode, realpart, imagpart);
    }

  /* Make sure regno_pointer_align, and regno_reg_rtx are large
     enough to have an element for this pseudo reg number.  */

  if (reg_rtx_no == f->emit->regno_pointer_align_length)
    {
      int old_size = f->emit->regno_pointer_align_length;
      char *new;
      rtx *new1;

      new = ggc_realloc (f->emit->regno_pointer_align, old_size * 2);
      memset (new + old_size, 0, old_size);
      f->emit->regno_pointer_align = (unsigned char *) new;

      new1 = (rtx *) ggc_realloc (f->emit->x_regno_reg_rtx,
				  old_size * 2 * sizeof (rtx));
      memset (new1 + old_size, 0, old_size * sizeof (rtx));
      regno_reg_rtx = new1;

      f->emit->regno_pointer_align_length = old_size * 2;
    }

  val = gen_raw_REG (mode, reg_rtx_no);
  regno_reg_rtx[reg_rtx_no++] = val;
  return val;
}

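/* A minimal illustrative sketch (disabled): per the comment above,
   when generating_concat_p is set a complex-mode pseudo comes back as
   a CONCAT of two part-mode pseudos rather than as a single REG.  */
#if 0
  rtx r = gen_reg_rtx (SImode);	/* (reg:SI n) */
  rtx c = gen_reg_rtx (DCmode);	/* (concat:DC (reg:DF n+1) (reg:DF n+2)) */
#endif
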
/* Generate a REG rtx with the same attributes as REG, but with mode
   MODE, register number REGNO, and REG's offset increased by OFFSET.  */

rtx
gen_rtx_REG_offset (reg, mode, regno, offset)
     enum machine_mode mode;
     unsigned int regno;
     int offset;
     rtx reg;
{
  rtx new = gen_rtx_REG (mode, regno);
  REG_ATTRS (new) = get_reg_attrs (REG_EXPR (reg),
				   REG_OFFSET (reg) + offset);
  return new;
}

/* Set the register attributes of REG from the memory attributes of MEM.  */

void
set_reg_attrs_from_mem (reg, mem)
     rtx reg;
     rtx mem;
{
  if (MEM_OFFSET (mem) && GET_CODE (MEM_OFFSET (mem)) == CONST_INT)
    REG_ATTRS (reg)
      = get_reg_attrs (MEM_EXPR (mem), INTVAL (MEM_OFFSET (mem)));
}

/* Set the register attributes for registers contained in PARM_RTX.
   Use needed values from memory attributes of MEM.  */

void
set_reg_attrs_for_parm (parm_rtx, mem)
     rtx parm_rtx;
     rtx mem;
{
  if (GET_CODE (parm_rtx) == REG)
    set_reg_attrs_from_mem (parm_rtx, mem);
  else if (GET_CODE (parm_rtx) == PARALLEL)
    {
      /* Check for a NULL entry in the first slot, used to indicate that the
	 parameter goes both on the stack and in registers.  */
      int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
      for (; i < XVECLEN (parm_rtx, 0); i++)
	{
	  rtx x = XVECEXP (parm_rtx, 0, i);
	  if (GET_CODE (XEXP (x, 0)) == REG)
	    REG_ATTRS (XEXP (x, 0))
	      = get_reg_attrs (MEM_EXPR (mem),
			       INTVAL (XEXP (x, 1)));
	}
    }
}

/* Assign the RTX X to declaration T.  */
void
set_decl_rtl (t, x)
     tree t;
     rtx x;
{
  DECL_CHECK (t)->decl.rtl = x;

  if (!x)
    return;
  /* For registers, we maintain the reverse information too.  */
  if (GET_CODE (x) == REG)
    REG_ATTRS (x) = get_reg_attrs (t, 0);
  else if (GET_CODE (x) == SUBREG)
    REG_ATTRS (SUBREG_REG (x))
      = get_reg_attrs (t, -SUBREG_BYTE (x));
  if (GET_CODE (x) == CONCAT)
    {
      if (REG_P (XEXP (x, 0)))
	REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
      if (REG_P (XEXP (x, 1)))
	REG_ATTRS (XEXP (x, 1))
	  = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int i;
      for (i = 0; i < XVECLEN (x, 0); i++)
	{
	  rtx y = XVECEXP (x, 0, i);
	  if (REG_P (XEXP (y, 0)))
	    REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
	}
    }
}

/* Identify REG (which may be a CONCAT) as a user register.  */

void
mark_user_reg (reg)
     rtx reg;
{
  if (GET_CODE (reg) == CONCAT)
    {
      REG_USERVAR_P (XEXP (reg, 0)) = 1;
      REG_USERVAR_P (XEXP (reg, 1)) = 1;
    }
  else if (GET_CODE (reg) == REG)
    REG_USERVAR_P (reg) = 1;
  else
    abort ();
}

/* Identify REG as a probable pointer register and show its alignment
   as ALIGN, if nonzero.  */

void
mark_reg_pointer (reg, align)
     rtx reg;
     int align;
{
  if (! REG_POINTER (reg))
    {
      REG_POINTER (reg) = 1;

      if (align)
	REGNO_POINTER_ALIGN (REGNO (reg)) = align;
    }
  else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
    /* We can no longer be sure just how aligned this pointer is.  */
    REGNO_POINTER_ALIGN (REGNO (reg)) = align;
}

/* Return 1 plus largest pseudo reg number used in the current function.  */

int
max_reg_num ()
{
  return reg_rtx_no;
}

/* Return 1 + the largest label number used so far in the current function.  */

int
max_label_num ()
{
  if (last_label_num && label_num == base_label_num)
    return last_label_num;
  return label_num;
}

/* Return first label number used in this function (if any were used).  */

int
get_first_label_num ()
{
  return first_label_num;
}

/* Return the final regno of X, which is a SUBREG of a hard
   register.  */

unsigned int
subreg_hard_regno (x, check_mode)
     rtx x;
     int check_mode;
{
  enum machine_mode mode = GET_MODE (x);
  unsigned int byte_offset, base_regno, final_regno;
  rtx reg = SUBREG_REG (x);

  /* This is where we attempt to catch illegal subregs
     created by the compiler.  */
  if (GET_CODE (x) != SUBREG
      || GET_CODE (reg) != REG)
    abort ();
  base_regno = REGNO (reg);
  if (base_regno >= FIRST_PSEUDO_REGISTER)
    abort ();
  if (check_mode && ! HARD_REGNO_MODE_OK (base_regno, GET_MODE (reg)))
    abort ();
#ifdef ENABLE_CHECKING
  if (!subreg_offset_representable_p (REGNO (reg), GET_MODE (reg),
				      SUBREG_BYTE (x), mode))
    abort ();
#endif
  /* Catch non-congruent offsets too.  */
  byte_offset = SUBREG_BYTE (x);
  if ((byte_offset % GET_MODE_SIZE (mode)) != 0)
    abort ();

  final_regno = subreg_regno (x);

  return final_regno;
}

/* Return a value representing some low-order bits of X, where the number
   of low-order bits is given by MODE.  Note that no conversion is done
   between floating-point and fixed-point values, rather, the bit
   representation is returned.

   This function handles the cases in common between gen_lowpart, below,
   and two variants in cse.c and combine.c.  These are the cases that can
   be safely handled at all points in the compilation.

   If this is not a case we can handle, return 0.  */

rtx
gen_lowpart_common (mode, x)
     enum machine_mode mode;
     rtx x;
{
  int msize = GET_MODE_SIZE (mode);
  int xsize = GET_MODE_SIZE (GET_MODE (x));
  int offset = 0;

  if (GET_MODE (x) == mode)
    return x;

  /* MODE must occupy no more words than the mode of X.  */
  if (GET_MODE (x) != VOIDmode
      && ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
	  > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
    return 0;

  /* Don't allow generating paradoxical FLOAT_MODE subregs.  */
  if (GET_MODE_CLASS (mode) == MODE_FLOAT
      && GET_MODE (x) != VOIDmode && msize > xsize)
    return 0;

  offset = subreg_lowpart_offset (mode, GET_MODE (x));

  if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
      && (GET_MODE_CLASS (mode) == MODE_INT
	  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
    {
      /* If we are getting the low-order part of something that has been
	 sign- or zero-extended, we can either just use the object being
	 extended or make a narrower extension.  If we want an even smaller
	 piece than the size of the object being extended, call ourselves
	 recursively.

	 This case is used mostly by combine and cse.  */

      if (GET_MODE (XEXP (x, 0)) == mode)
	return XEXP (x, 0);
      else if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
	return gen_lowpart_common (mode, XEXP (x, 0));
      else if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (x)))
	return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
    }
  else if (GET_CODE (x) == SUBREG || GET_CODE (x) == REG
	   || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR)
    return simplify_gen_subreg (mode, x, GET_MODE (x), offset);
  else if ((GET_MODE_CLASS (mode) == MODE_VECTOR_INT
	    || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT)
	   && GET_MODE (x) == VOIDmode)
    return simplify_gen_subreg (mode, x, int_mode_for_mode (mode), offset);
  /* If X is a CONST_INT or a CONST_DOUBLE, extract the appropriate bits
     from the low-order part of the constant.  */
  else if ((GET_MODE_CLASS (mode) == MODE_INT
	    || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
	   && GET_MODE (x) == VOIDmode
	   && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
    {
      /* If MODE is twice the host word size, X is already the desired
	 representation.  Otherwise, if MODE is wider than a word, we can't
	 do this.  If MODE is exactly a word, return just one CONST_INT.  */

      if (GET_MODE_BITSIZE (mode) >= 2 * HOST_BITS_PER_WIDE_INT)
	return x;
      else if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
	return 0;
      else if (GET_MODE_BITSIZE (mode) == HOST_BITS_PER_WIDE_INT)
	return (GET_CODE (x) == CONST_INT ? x
		: GEN_INT (CONST_DOUBLE_LOW (x)));
      else
	{
	  /* MODE must be narrower than HOST_BITS_PER_WIDE_INT.  */
	  HOST_WIDE_INT val = (GET_CODE (x) == CONST_INT ? INTVAL (x)
			       : CONST_DOUBLE_LOW (x));

	  /* Sign extend to HOST_WIDE_INT.  */
	  val = trunc_int_for_mode (val, mode);

	  return (GET_CODE (x) == CONST_INT && INTVAL (x) == val ? x
		  : GEN_INT (val));
	}
    }

  /* The floating-point emulator can handle all conversions between
     FP and integer operands.  This simplifies reload because it
     doesn't have to deal with constructs like (subreg:DI
     (const_double:SF ...)) or (subreg:DF (const_int ...)).  */
  /* Single-precision floats are always 32-bits and double-precision
     floats are always 64-bits.  */

  else if (GET_MODE_CLASS (mode) == MODE_FLOAT
	   && GET_MODE_BITSIZE (mode) == 32
	   && GET_CODE (x) == CONST_INT)
    {
      REAL_VALUE_TYPE r;
      long i = INTVAL (x);

      real_from_target (&r, &i, mode);
      return CONST_DOUBLE_FROM_REAL_VALUE (r, mode);
    }
  else if (GET_MODE_CLASS (mode) == MODE_FLOAT
	   && GET_MODE_BITSIZE (mode) == 64
	   && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE)
	   && GET_MODE (x) == VOIDmode)
    {
      REAL_VALUE_TYPE r;
      HOST_WIDE_INT low, high;
      long i[2];

      if (GET_CODE (x) == CONST_INT)
	{
	  low = INTVAL (x);
	  high = low >> (HOST_BITS_PER_WIDE_INT - 1);
	}
      else
	{
	  low = CONST_DOUBLE_LOW (x);
	  high = CONST_DOUBLE_HIGH (x);
	}

      if (HOST_BITS_PER_WIDE_INT > 32)
	high = low >> 31 >> 1;

      /* REAL_VALUE_TARGET_DOUBLE takes the addressing order of the
	 target machine.  */
      if (WORDS_BIG_ENDIAN)
	i[0] = high, i[1] = low;
      else
	i[0] = low, i[1] = high;

      real_from_target (&r, i, mode);
      return CONST_DOUBLE_FROM_REAL_VALUE (r, mode);
    }
  else if ((GET_MODE_CLASS (mode) == MODE_INT
	    || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
	   && GET_CODE (x) == CONST_DOUBLE
	   && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
    {
      REAL_VALUE_TYPE r;
      long i[4];  /* Only the low 32 bits of each 'long' are used.  */
      int endian = WORDS_BIG_ENDIAN ? 1 : 0;

      /* Convert 'r' into an array of four 32-bit words in target word
	 order.  */
      REAL_VALUE_FROM_CONST_DOUBLE (r, x);
      switch (GET_MODE_BITSIZE (GET_MODE (x)))
	{
	case 32:
	  REAL_VALUE_TO_TARGET_SINGLE (r, i[3 * endian]);
	  i[1] = 0;
	  i[2] = 0;
	  i[3 - 3 * endian] = 0;
	  break;
	case 64:
	  REAL_VALUE_TO_TARGET_DOUBLE (r, i + 2 * endian);
	  i[2 - 2 * endian] = 0;
	  i[3 - 2 * endian] = 0;
	  break;
	case 96:
	  REAL_VALUE_TO_TARGET_LONG_DOUBLE (r, i + endian);
	  i[3 - 3 * endian] = 0;
	  break;
	case 128:
	  REAL_VALUE_TO_TARGET_LONG_DOUBLE (r, i);
	  break;
	default:
	  abort ();
	}
      /* Now, pack the 32-bit elements of the array into a CONST_DOUBLE
	 and return it.  */
#if HOST_BITS_PER_WIDE_INT == 32
      return immed_double_const (i[3 * endian], i[1 + endian], mode);
#else
      if (HOST_BITS_PER_WIDE_INT != 64)
	abort ();

      return immed_double_const ((((unsigned long) i[3 * endian])
				  | ((HOST_WIDE_INT) i[1 + endian] << 32)),
				 (((unsigned long) i[2 - endian])
				  | ((HOST_WIDE_INT) i[3 - 3 * endian] << 32)),
				 mode);
#endif
    }

  /* Otherwise, we can't do this.  */
  return 0;
}

/* Return the real part (which has mode MODE) of a complex value X.
   This always comes at the low address in memory.  */

rtx
gen_realpart (mode, x)
     enum machine_mode mode;
     rtx x;
{
  if (WORDS_BIG_ENDIAN
      && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
      && REG_P (x)
      && REGNO (x) < FIRST_PSEUDO_REGISTER)
    internal_error
      ("can't access real part of complex value in hard register");
  else if (WORDS_BIG_ENDIAN)
    return gen_highpart (mode, x);
  else
    return gen_lowpart (mode, x);
}

/* Return the imaginary part (which has mode MODE) of a complex value X.
   This always comes at the high address in memory.  */

rtx
gen_imagpart (mode, x)
     enum machine_mode mode;
     rtx x;
{
  if (WORDS_BIG_ENDIAN)
    return gen_lowpart (mode, x);
  else if (! WORDS_BIG_ENDIAN
	   && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
	   && REG_P (x)
	   && REGNO (x) < FIRST_PSEUDO_REGISTER)
    internal_error
      ("can't access imaginary part of complex value in hard register");
  else
    return gen_highpart (mode, x);
}

/* Return 1 iff X, assumed to be a SUBREG,
   refers to the real part of the complex value in its containing reg.
   Complex values are always stored with the real part in the first word,
   regardless of WORDS_BIG_ENDIAN.  */

int
subreg_realpart_p (x)
     rtx x;
{
  if (GET_CODE (x) != SUBREG)
    abort ();

  return ((unsigned int) SUBREG_BYTE (x)
	  < GET_MODE_UNIT_SIZE (GET_MODE (SUBREG_REG (x))));
}

/* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a value,
   return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
   least-significant part of X.
   MODE specifies how big a part of X to return;
   it usually should not be larger than a word.
   If X is a MEM whose address is a QUEUED, the value may be so also.  */

rtx
gen_lowpart (mode, x)
     enum machine_mode mode;
     rtx x;
{
  rtx result = gen_lowpart_common (mode, x);

  if (result)
    return result;
  else if (GET_CODE (x) == REG)
    {
      /* Must be a hard reg that's not valid in MODE.  */
      result = gen_lowpart_common (mode, copy_to_reg (x));
      if (result == 0)
	abort ();
      return result;
    }
  else if (GET_CODE (x) == MEM)
    {
      /* The only additional case we can do is MEM.  */
      int offset = 0;

      /* The following exposes the use of "x" to CSE.  */
      if (GET_MODE_SIZE (GET_MODE (x)) <= UNITS_PER_WORD
	  && SCALAR_INT_MODE_P (GET_MODE (x))
	  && ! no_new_pseudos)
	return gen_lowpart (mode, force_reg (GET_MODE (x), x));

      if (WORDS_BIG_ENDIAN)
	offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
		  - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));

      if (BYTES_BIG_ENDIAN)
	/* Adjust the address so that the address-after-the-data
	   is unchanged.  */
	offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
		   - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));

      return adjust_address (x, mode, offset);
    }
  else if (GET_CODE (x) == ADDRESSOF)
    return gen_lowpart (mode, force_reg (GET_MODE (x), x));
  else
    abort ();
}

/* Like `gen_lowpart', but refer to the most significant part.
   This is used to access the imaginary part of a complex number.  */

rtx
gen_highpart (mode, x)
     enum machine_mode mode;
     rtx x;
{
  unsigned int msize = GET_MODE_SIZE (mode);
  rtx result;

  /* This case loses if X is a subreg.  To catch bugs early,
     complain if an invalid MODE is used even in other cases.  */
  if (msize > UNITS_PER_WORD
      && msize != GET_MODE_UNIT_SIZE (GET_MODE (x)))
    abort ();

  result = simplify_gen_subreg (mode, x, GET_MODE (x),
				subreg_highpart_offset (mode, GET_MODE (x)));

  /* simplify_gen_subreg is not guaranteed to return a valid operand for
     the target if we have a MEM.  gen_highpart must return a valid operand,
     emitting code if necessary to do so.  */
  if (result != NULL_RTX && GET_CODE (result) == MEM)
    result = validize_mem (result);

  if (!result)
    abort ();
  return result;
}

/* Like gen_highpart, but accept mode of EXP operand in case EXP can
   be VOIDmode constant.  */
rtx
gen_highpart_mode (outermode, innermode, exp)
     enum machine_mode outermode, innermode;
     rtx exp;
{
  if (GET_MODE (exp) != VOIDmode)
    {
      if (GET_MODE (exp) != innermode)
	abort ();
      return gen_highpart (outermode, exp);
    }
  return simplify_gen_subreg (outermode, exp, innermode,
			      subreg_highpart_offset (outermode, innermode));
}

/* Return offset in bytes to get OUTERMODE low part
   of the value in mode INNERMODE stored in memory in target format.  */

unsigned int
subreg_lowpart_offset (outermode, innermode)
     enum machine_mode outermode, innermode;
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  return offset;
}

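/* Worked example (disabled), assuming 4-byte words: for the SImode
   low part of a DImode value, difference = 8 - 4 = 4, so the offset
   is 0 on a little-endian target and 4 when WORDS_BIG_ENDIAN holds
   ((4 / 4) * 4 bytes from the word term, 4 % 4 == 0 from the byte
   term).  */
#if 0
  unsigned int off = subreg_lowpart_offset (SImode, DImode);
#endif
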
/* Return offset in bytes to get OUTERMODE high part
   of the value in mode INNERMODE stored in memory in target format.  */
unsigned int
subreg_highpart_offset (outermode, innermode)
     enum machine_mode outermode, innermode;
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  if (GET_MODE_SIZE (innermode) < GET_MODE_SIZE (outermode))
    abort ();

  if (difference > 0)
    {
      if (! WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (! BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  return offset;
}

/* Return 1 iff X, assumed to be a SUBREG,
   refers to the least significant part of its containing reg.
   If X is not a SUBREG, always return 1 (it is its own low part!).  */

int
subreg_lowpart_p (x)
     rtx x;
{
  if (GET_CODE (x) != SUBREG)
    return 1;
  else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
    return 0;

  return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
	  == SUBREG_BYTE (x));
}

/* Helper routine for all the constant cases of operand_subword.
   Some places invoke this directly.  */

rtx
constant_subword (op, offset, mode)
     rtx op;
     int offset;
     enum machine_mode mode;
{
  int size_ratio = HOST_BITS_PER_WIDE_INT / BITS_PER_WORD;
  HOST_WIDE_INT val;

  /* If OP is already an integer word, return it.  */
  if (GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_SIZE (mode) == UNITS_PER_WORD)
    return op;

  /* The output is some bits, the width of the target machine's word.
     A wider-word host can surely hold them in a CONST_INT.  A narrower-word
     host can't.  */
  if (HOST_BITS_PER_WIDE_INT >= BITS_PER_WORD
      && GET_MODE_CLASS (mode) == MODE_FLOAT
      && GET_MODE_BITSIZE (mode) == 64
      && GET_CODE (op) == CONST_DOUBLE)
    {
      long k[2];
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_DOUBLE (rv, k);

      /* We handle 32-bit and >= 64-bit words here.  Note that the order in
	 which the words are written depends on the word endianness.
	 ??? This is a potential portability problem and should
	 be fixed at some point.

	 We must exercise caution with the sign bit.  By definition there
	 are 32 significant bits in K; there may be more in a HOST_WIDE_INT.
	 Consider a host with a 32-bit long and a 64-bit HOST_WIDE_INT.
	 So we explicitly mask and sign-extend as necessary.  */
      if (BITS_PER_WORD == 32)
	{
	  val = k[offset];
	  val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;
	  return GEN_INT (val);
	}
#if HOST_BITS_PER_WIDE_INT >= 64
      else if (BITS_PER_WORD >= 64 && offset == 0)
	{
	  val = k[! WORDS_BIG_ENDIAN];
	  val = (((val & 0xffffffff) ^ 0x80000000) - 0x80000000) << 32;
	  val |= (HOST_WIDE_INT) k[WORDS_BIG_ENDIAN] & 0xffffffff;
	  return GEN_INT (val);
	}
#endif
      else if (BITS_PER_WORD == 16)
	{
	  val = k[offset >> 1];
	  if ((offset & 1) == ! WORDS_BIG_ENDIAN)
	    val >>= 16;
	  val = ((val & 0xffff) ^ 0x8000) - 0x8000;
	  return GEN_INT (val);
	}
      else
	abort ();
    }
  else if (HOST_BITS_PER_WIDE_INT >= BITS_PER_WORD
	   && GET_MODE_CLASS (mode) == MODE_FLOAT
	   && GET_MODE_BITSIZE (mode) > 64
	   && GET_CODE (op) == CONST_DOUBLE)
    {
      long k[4];
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);

      if (BITS_PER_WORD == 32)
	{
	  val = k[offset];
	  val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;
	  return GEN_INT (val);
	}
#if HOST_BITS_PER_WIDE_INT >= 64
      else if (BITS_PER_WORD >= 64 && offset <= 1)
	{
	  val = k[offset * 2 + ! WORDS_BIG_ENDIAN];
	  val = (((val & 0xffffffff) ^ 0x80000000) - 0x80000000) << 32;
	  val |= (HOST_WIDE_INT) k[offset * 2 + WORDS_BIG_ENDIAN] & 0xffffffff;
	  return GEN_INT (val);
	}
#endif
      else
	abort ();
    }

  /* Single word float is a little harder, since single- and double-word
     values often do not have the same high-order bits.  We have already
     verified that we want the only defined word of the single-word value.  */
  if (GET_MODE_CLASS (mode) == MODE_FLOAT
      && GET_MODE_BITSIZE (mode) == 32
      && GET_CODE (op) == CONST_DOUBLE)
    {
      long l;
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_SINGLE (rv, l);

      /* Sign extend from known 32-bit value to HOST_WIDE_INT.  */
      val = l;
      val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;

      if (BITS_PER_WORD == 16)
	{
	  if ((offset & 1) == ! WORDS_BIG_ENDIAN)
	    val >>= 16;
	  val = ((val & 0xffff) ^ 0x8000) - 0x8000;
	}

      return GEN_INT (val);
    }

  /* The only remaining cases that we can handle are integers.
     Convert to proper endianness now since these cases need it.
     At this point, offset == 0 means the low-order word.

     We do not want to handle the case when BITS_PER_WORD <= HOST_BITS_PER_INT
     in general.  However, if OP is (const_int 0), we can just return
     it for any word.  */

  if (op == const0_rtx)
    return op;

  if (GET_MODE_CLASS (mode) != MODE_INT
      || (GET_CODE (op) != CONST_INT && GET_CODE (op) != CONST_DOUBLE)
      || BITS_PER_WORD > HOST_BITS_PER_WIDE_INT)
    return 0;

  if (WORDS_BIG_ENDIAN)
    offset = GET_MODE_SIZE (mode) / UNITS_PER_WORD - 1 - offset;

  /* Find out which word on the host machine this value is in and get
     it from the constant.  */
  val = (offset / size_ratio == 0
	 ? (GET_CODE (op) == CONST_INT ? INTVAL (op) : CONST_DOUBLE_LOW (op))
	 : (GET_CODE (op) == CONST_INT
	    ? (INTVAL (op) < 0 ? ~0 : 0) : CONST_DOUBLE_HIGH (op)));

  /* Get the value we want into the low bits of val.  */
  if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT)
    val = ((val >> ((offset % size_ratio) * BITS_PER_WORD)));

  val = trunc_int_for_mode (val, word_mode);

  return GEN_INT (val);
}

/* Return subword OFFSET of operand OP.
   The word number, OFFSET, is interpreted as the word number starting
   at the low-order address.  OFFSET 0 is the low-order word if not
   WORDS_BIG_ENDIAN, otherwise it is the high-order word.

   If we cannot extract the required word, we return zero.  Otherwise,
   an rtx corresponding to the requested word will be returned.

   VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
   reload has completed, a valid address will always be returned.  After
   reload, if a valid address cannot be returned, we return zero.

   If VALIDATE_ADDRESS is zero, we simply form the required address; validating
   it is the responsibility of the caller.

   MODE is the mode of OP in case it is a CONST_INT.

   ??? This is still rather broken for some cases.  The problem for the
   moment is that all callers of this thing provide no 'goal mode' to
   tell us to work with.  This exists because all callers were written
   in a word based SUBREG world.
   Now use of this function can be deprecated by simplify_subreg in most
   cases.  */

rtx
operand_subword (op, offset, validate_address, mode)
     rtx op;
     unsigned int offset;
     int validate_address;
     enum machine_mode mode;
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  if (mode == VOIDmode)
    abort ();

  /* If OP is narrower than a word, fail.  */
  if (mode != BLKmode
      && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
    return 0;

  /* If we want a word outside OP, return zero.  */
  if (mode != BLKmode
      && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
    return const0_rtx;

  /* Form a new MEM at the requested address.  */
  if (GET_CODE (op) == MEM)
    {
      rtx new = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);

      if (! validate_address)
	return new;

      else if (reload_completed)
	{
	  if (! strict_memory_address_p (word_mode, XEXP (new, 0)))
	    return 0;
	}
      else
	return replace_equiv_address (new, XEXP (new, 0));
    }

  /* Rest can be handled by simplify_subreg.  */
  return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
}

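/* A minimal illustrative sketch (disabled), assuming 4-byte words and
   a hypothetical DImode MEM `m': word 0 is `m' re-addressed in
   word_mode, word 1 the word 4 bytes above it (this is the low word
   first unless WORDS_BIG_ENDIAN).  */
#if 0
  rtx lo = operand_subword (m, 0, 1, DImode);
  rtx hi = operand_subword (m, 1, 1, DImode);
#endif
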
/* Similar to `operand_subword', but never return 0.  If we can't extract
   the required subword, put OP into a register and try again.  If that fails,
   abort.  We always validate the address in this case.

   MODE is the mode of OP, in case it is CONST_INT.  */

rtx
operand_subword_force (op, offset, mode)
     rtx op;
     unsigned int offset;
     enum machine_mode mode;
{
  rtx result = operand_subword (op, offset, 1, mode);

  if (result)
    return result;

  if (mode != BLKmode && mode != VOIDmode)
    {
      /* If this is a register which can not be accessed by words, copy it
	 to a pseudo register.  */
      if (GET_CODE (op) == REG)
	op = copy_to_reg (op);
      else
	op = force_reg (mode, op);
    }

  result = operand_subword (op, offset, 1, mode);
  if (result == 0)
    abort ();

  return result;
}

/* Given a compare instruction, swap the operands.
   A test instruction is changed into a compare of 0 against the operand.  */

void
reverse_comparison (insn)
     rtx insn;
{
  rtx body = PATTERN (insn);
  rtx comp;

  if (GET_CODE (body) == SET)
    comp = SET_SRC (body);
  else
    comp = SET_SRC (XVECEXP (body, 0, 0));

  if (GET_CODE (comp) == COMPARE)
    {
      rtx op0 = XEXP (comp, 0);
      rtx op1 = XEXP (comp, 1);
      XEXP (comp, 0) = op1;
      XEXP (comp, 1) = op0;
    }
  else
    {
      rtx new = gen_rtx_COMPARE (VOIDmode,
				 CONST0_RTX (GET_MODE (comp)), comp);
      if (GET_CODE (body) == SET)
	SET_SRC (body) = new;
      else
	SET_SRC (XVECEXP (body, 0, 0)) = new;
    }
}

/* Within a MEM_EXPR, we care about either (1) a component ref of a decl,
   or (2) a component ref of something variable.  Represent the latter with
   a NULL expression.  */

static tree
component_ref_for_mem_expr (ref)
     tree ref;
{
  tree inner = TREE_OPERAND (ref, 0);

  if (TREE_CODE (inner) == COMPONENT_REF)
    inner = component_ref_for_mem_expr (inner);
  else
    {
      tree placeholder_ptr = 0;

      /* Now remove any conversions: they don't change what the underlying
	 object is.  Likewise for SAVE_EXPR.  Also handle PLACEHOLDER_EXPR.  */
      while (TREE_CODE (inner) == NOP_EXPR || TREE_CODE (inner) == CONVERT_EXPR
	     || TREE_CODE (inner) == NON_LVALUE_EXPR
	     || TREE_CODE (inner) == VIEW_CONVERT_EXPR
	     || TREE_CODE (inner) == SAVE_EXPR
	     || TREE_CODE (inner) == PLACEHOLDER_EXPR)
	if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
	  inner = find_placeholder (inner, &placeholder_ptr);
	else
	  inner = TREE_OPERAND (inner, 0);

      if (! DECL_P (inner))
	inner = NULL_TREE;
    }

  if (inner == TREE_OPERAND (ref, 0))
    return ref;
  else
    return build (COMPONENT_REF, TREE_TYPE (ref), inner,
		  TREE_OPERAND (ref, 1));
}

1850 /* Given REF, a MEM, and T, either the type of X or the expression
1851 corresponding to REF, set the memory attributes. OBJECTP is nonzero
1852 if we are making a new object of this type. BITPOS is nonzero if
1853 there is an offset outstanding on T that will be applied later. */
1855 void
1856 set_mem_attributes_minus_bitpos (ref, t, objectp, bitpos)
1857 rtx ref;
1858 tree t;
1859 int objectp;
1860 HOST_WIDE_INT bitpos;
1862 HOST_WIDE_INT alias = MEM_ALIAS_SET (ref);
1863 tree expr = MEM_EXPR (ref);
1864 rtx offset = MEM_OFFSET (ref);
1865 rtx size = MEM_SIZE (ref);
1866 unsigned int align = MEM_ALIGN (ref);
1867 HOST_WIDE_INT apply_bitpos = 0;
1868 tree type;
1870 /* It can happen that type_for_mode was given a mode for which there
1871 is no language-level type. In which case it returns NULL, which
1872 we can see here. */
1873 if (t == NULL_TREE)
1874 return;
1876 type = TYPE_P (t) ? t : TREE_TYPE (t);
1878 /* If we have already set DECL_RTL = ref, get_alias_set will get the
1879 wrong answer, as it assumes that DECL_RTL already has the right alias
1880 info. Callers should not set DECL_RTL until after the call to
1881 set_mem_attributes. */
1882 if (DECL_P (t) && ref == DECL_RTL_IF_SET (t))
1883 abort ();
1885 /* Get the alias set from the expression or type (perhaps using a
1886 front-end routine) and use it. */
1887 alias = get_alias_set (t);
1889 MEM_VOLATILE_P (ref) = TYPE_VOLATILE (type);
1890 MEM_IN_STRUCT_P (ref) = AGGREGATE_TYPE_P (type);
1891 RTX_UNCHANGING_P (ref)
1892 |= ((lang_hooks.honor_readonly
1893 && (TYPE_READONLY (type) || TREE_READONLY (t)))
1894 || (! TYPE_P (t) && TREE_CONSTANT (t)));
1896 /* If we are making an object of this type, or if this is a DECL, we know
1897 that it is a scalar if the type is not an aggregate. */
1898 if ((objectp || DECL_P (t)) && ! AGGREGATE_TYPE_P (type))
1899 MEM_SCALAR_P (ref) = 1;
1901 /* We can set the alignment from the type if we are making an object,
1902 this is an INDIRECT_REF, or if TYPE_ALIGN_OK. */
1903 if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
1904 align = MAX (align, TYPE_ALIGN (type));
1906 /* If the size is known, we can set that. */
1907 if (TYPE_SIZE_UNIT (type) && host_integerp (TYPE_SIZE_UNIT (type), 1))
1908 size = GEN_INT (tree_low_cst (TYPE_SIZE_UNIT (type), 1));
1910 /* If T is not a type, we may be able to deduce some more information about
1911 the expression. */
1912 if (! TYPE_P (t))
1914 maybe_set_unchanging (ref, t);
1915 if (TREE_THIS_VOLATILE (t))
1916 MEM_VOLATILE_P (ref) = 1;
1918 /* Now remove any conversions: they don't change what the underlying
1919 object is. Likewise for SAVE_EXPR. */
1920 while (TREE_CODE (t) == NOP_EXPR || TREE_CODE (t) == CONVERT_EXPR
1921 || TREE_CODE (t) == NON_LVALUE_EXPR
1922 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1923 || TREE_CODE (t) == SAVE_EXPR)
1924 t = TREE_OPERAND (t, 0);
1926 /* If this expression can't be addressed (e.g., it contains a reference
1927 to a non-addressable field), show we don't change its alias set. */
1928 if (! can_address_p (t))
1929 MEM_KEEP_ALIAS_SET_P (ref) = 1;
1931 /* If this is a decl, set the attributes of the MEM from it. */
1932 if (DECL_P (t))
1934 expr = t;
1935 offset = const0_rtx;
1936 apply_bitpos = bitpos;
1937 size = (DECL_SIZE_UNIT (t)
1938 && host_integerp (DECL_SIZE_UNIT (t), 1)
1939 ? GEN_INT (tree_low_cst (DECL_SIZE_UNIT (t), 1)) : 0);
1940 align = DECL_ALIGN (t);
1943 /* If this is a constant, we know the alignment. */
1944 else if (TREE_CODE_CLASS (TREE_CODE (t)) == 'c')
1946 align = TYPE_ALIGN (type);
1947 #ifdef CONSTANT_ALIGNMENT
1948 align = CONSTANT_ALIGNMENT (t, align);
1949 #endif
1952 /* If this is a field reference and not a bit-field, record it. */
1953 /* ??? There is some information that can be gleaned from bit-fields,
1954 such as the word offset in the structure that might be modified.
1955 But skip it for now. */
1956 else if (TREE_CODE (t) == COMPONENT_REF
1957 && ! DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
1959 expr = component_ref_for_mem_expr (t);
1960 offset = const0_rtx;
1961 apply_bitpos = bitpos;
1962 /* ??? Any reason the field size would be different than
1963 the size we got from the type? */
1966 /* If this is an array reference, look for an outer field reference. */
1967 else if (TREE_CODE (t) == ARRAY_REF)
1969 tree off_tree = size_zero_node;
1973 tree index = TREE_OPERAND (t, 1);
1974 tree array = TREE_OPERAND (t, 0);
1975 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
1976 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
1977 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
1979 /* We assume all arrays have sizes that are a multiple of a byte.
1980 First subtract the lower bound, if any, in the type of the
1981 index, then convert to sizetype and multiply by the size of the
1982 array element. */
1983 if (low_bound != 0 && ! integer_zerop (low_bound))
1984 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
1985 index, low_bound));
1987 /* If the index has a self-referential type, pass it to a
1988 WITH_RECORD_EXPR; if the component size is self-referential,
1989 pass our component to one. */
1990 if (! TREE_CONSTANT (index)
1991 && contains_placeholder_p (index))
1992 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, t);
1993 if (! TREE_CONSTANT (unit_size)
1994 && contains_placeholder_p (unit_size))
1995 unit_size = build (WITH_RECORD_EXPR, sizetype,
1996 unit_size, array);
1998 off_tree
1999 = fold (build (PLUS_EXPR, sizetype,
2000 fold (build (MULT_EXPR, sizetype,
2001 index,
2002 unit_size)),
2003 off_tree));
2004 t = TREE_OPERAND (t, 0);
2006 while (TREE_CODE (t) == ARRAY_REF);
2008 if (DECL_P (t))
2010 expr = t;
2011 offset = NULL;
2012 if (host_integerp (off_tree, 1))
2014 HOST_WIDE_INT ioff = tree_low_cst (off_tree, 1);
2015 HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
2016 align = DECL_ALIGN (t);
2017 if (aoff && (unsigned HOST_WIDE_INT) aoff < align)
2018 align = aoff;
2019 offset = GEN_INT (ioff);
2020 apply_bitpos = bitpos;
2023 else if (TREE_CODE (t) == COMPONENT_REF)
2025 expr = component_ref_for_mem_expr (t);
2026 if (host_integerp (off_tree, 1))
2028 offset = GEN_INT (tree_low_cst (off_tree, 1));
2029 apply_bitpos = bitpos;
2031 /* ??? Any reason the field size would be different than
2032 the size we got from the type? */
2034 else if (flag_argument_noalias > 1
2035 && TREE_CODE (t) == INDIRECT_REF
2036 && TREE_CODE (TREE_OPERAND (t, 0)) == PARM_DECL)
2038 expr = t;
2039 offset = NULL;
2043 /* If this is a Fortran indirect argument reference, record the
2044 parameter decl. */
2045 else if (flag_argument_noalias > 1
2046 && TREE_CODE (t) == INDIRECT_REF
2047 && TREE_CODE (TREE_OPERAND (t, 0)) == PARM_DECL)
2049 expr = t;
2050 offset = NULL;
2054 /* If we modified OFFSET based on T, then subtract the outstanding
2055 bit position offset. Similarly, increase the size of the accessed
2056 object to contain the negative offset. */
2057 if (apply_bitpos)
2059 offset = plus_constant (offset, -(apply_bitpos / BITS_PER_UNIT));
2060 if (size)
2061 size = plus_constant (size, apply_bitpos / BITS_PER_UNIT);
2064 /* Now set the attributes we computed above. */
2065 MEM_ATTRS (ref)
2066 = get_mem_attrs (alias, expr, offset, size, align, GET_MODE (ref));
2068 /* If this is already known to be a scalar or aggregate, we are done. */
2069 if (MEM_IN_STRUCT_P (ref) || MEM_SCALAR_P (ref))
2070 return;
2072 /* If it is a reference into an aggregate, this is part of an aggregate.
2073 Otherwise we don't know. */
2074 else if (TREE_CODE (t) == COMPONENT_REF || TREE_CODE (t) == ARRAY_REF
2075 || TREE_CODE (t) == ARRAY_RANGE_REF
2076 || TREE_CODE (t) == BIT_FIELD_REF)
2077 MEM_IN_STRUCT_P (ref) = 1;
2080 void
2081 set_mem_attributes (ref, t, objectp)
2082 rtx ref;
2083 tree t;
2084 int objectp;
2086 set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
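/* An illustrative usage sketch (not part of the original source): a
   caller that has computed ADDR for some VAR_DECL can build a MEM and
   let set_mem_attributes fill in the alias set, alignment, size and
   expression from the decl.  */

#if 0	/* Example only.  */
static rtx
example_mem_for_decl (decl, addr)
     tree decl;
     rtx addr;
{
  rtx mem = gen_rtx_MEM (DECL_MODE (decl), addr);

  /* Nonzero OBJECTP says this MEM is a new object of DECL's type.  */
  set_mem_attributes (mem, decl, 1);
  return mem;
}
#endif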
2089 /* Set the memory attributes of MEM from REG's register attributes. */
2091 void
2092 set_mem_attrs_from_reg (mem, reg)
2093 rtx mem;
2094 rtx reg;
2096 MEM_ATTRS (mem)
2097 = get_mem_attrs (MEM_ALIAS_SET (mem), REG_EXPR (reg),
2098 GEN_INT (REG_OFFSET (reg)),
2099 MEM_SIZE (mem), MEM_ALIGN (mem), GET_MODE (mem));
2102 /* Set the alias set of MEM to SET. */
2104 void
2105 set_mem_alias_set (mem, set)
2106 rtx mem;
2107 HOST_WIDE_INT set;
2109 #ifdef ENABLE_CHECKING
2110 /* If the new and old alias sets don't conflict, something is wrong. */
2111 if (!alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)))
2112 abort ();
2113 #endif
2115 MEM_ATTRS (mem) = get_mem_attrs (set, MEM_EXPR (mem), MEM_OFFSET (mem),
2116 MEM_SIZE (mem), MEM_ALIGN (mem),
2117 GET_MODE (mem));
2120 /* Set the alignment of MEM to ALIGN bits. */
2122 void
2123 set_mem_align (mem, align)
2124 rtx mem;
2125 unsigned int align;
2127 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
2128 MEM_OFFSET (mem), MEM_SIZE (mem), align,
2129 GET_MODE (mem));
2132 /* Set the expr for MEM to EXPR. */
2134 void
2135 set_mem_expr (mem, expr)
2136 rtx mem;
2137 tree expr;
2139 MEM_ATTRS (mem)
2140 = get_mem_attrs (MEM_ALIAS_SET (mem), expr, MEM_OFFSET (mem),
2141 MEM_SIZE (mem), MEM_ALIGN (mem), GET_MODE (mem));
2144 /* Set the offset of MEM to OFFSET. */
2146 void
2147 set_mem_offset (mem, offset)
2148 rtx mem, offset;
2150 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
2151 offset, MEM_SIZE (mem), MEM_ALIGN (mem),
2152 GET_MODE (mem));
2155 /* Set the size of MEM to SIZE. */
2157 void
2158 set_mem_size (mem, size)
2159 rtx mem, size;
2161 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
2162 MEM_OFFSET (mem), size, MEM_ALIGN (mem),
2163 GET_MODE (mem));
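/* The setters above are normally chained right after a MEM is created
   or copied.  A hypothetical sketch:  */

#if 0	/* Example only.  */
     set_mem_align (mem, 32);		/* Known to be 32-bit aligned.  */
     set_mem_size (mem, GEN_INT (4));	/* Access covers 4 bytes.  */
#endif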
2166 /* Return a memory reference like MEMREF, but with its mode changed to MODE
2167 and its address changed to ADDR. (VOIDmode means don't change the mode.
2168 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
2169 returned memory location is required to be valid. The memory
2170 attributes are not changed. */
2172 static rtx
2173 change_address_1 (memref, mode, addr, validate)
2174 rtx memref;
2175 enum machine_mode mode;
2176 rtx addr;
2177 int validate;
2179 rtx new;
2181 if (GET_CODE (memref) != MEM)
2182 abort ();
2183 if (mode == VOIDmode)
2184 mode = GET_MODE (memref);
2185 if (addr == 0)
2186 addr = XEXP (memref, 0);
2188 if (validate)
2190 if (reload_in_progress || reload_completed)
2192 if (! memory_address_p (mode, addr))
2193 abort ();
2195 else
2196 addr = memory_address (mode, addr);
2199 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
2200 return memref;
2202 new = gen_rtx_MEM (mode, addr);
2203 MEM_COPY_ATTRIBUTES (new, memref);
2204 return new;
2207 /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
2208 way we are changing MEMREF, so we only preserve the alias set. */
2211 change_address (memref, mode, addr)
2212 rtx memref;
2213 enum machine_mode mode;
2214 rtx addr;
2216 rtx new = change_address_1 (memref, mode, addr, 1);
2217 enum machine_mode mmode = GET_MODE (new);
2219 MEM_ATTRS (new)
2220 = get_mem_attrs (MEM_ALIAS_SET (memref), 0, 0,
2221 mmode == BLKmode ? 0 : GEN_INT (GET_MODE_SIZE (mmode)),
2222 (mmode == BLKmode ? BITS_PER_UNIT
2223 : GET_MODE_ALIGNMENT (mmode)),
2224 mmode);
2226 return new;
2229 /* Return a memory reference like MEMREF, but with its mode changed
2230 to MODE and its address offset by OFFSET bytes. If VALIDATE is
2231 nonzero, the memory address is forced to be valid.
2232 If ADJUST is zero, OFFSET is only used to update MEM_ATTRS
2233 and caller is responsible for adjusting MEMREF base register. */
2236 adjust_address_1 (memref, mode, offset, validate, adjust)
2237 rtx memref;
2238 enum machine_mode mode;
2239 HOST_WIDE_INT offset;
2240 int validate, adjust;
2242 rtx addr = XEXP (memref, 0);
2243 rtx new;
2244 rtx memoffset = MEM_OFFSET (memref);
2245 rtx size = 0;
2246 unsigned int memalign = MEM_ALIGN (memref);
2248 /* ??? Prefer to create garbage instead of creating shared rtl.
2249 This may happen even if offset is nonzero -- consider
2250 (plus (plus reg reg) const_int) -- so do this always. */
2251 addr = copy_rtx (addr);
2253 if (adjust)
2255 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
2256 object, we can merge it into the LO_SUM. */
2257 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
2258 && offset >= 0
2259 && (unsigned HOST_WIDE_INT) offset
2260 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
2261 addr = gen_rtx_LO_SUM (Pmode, XEXP (addr, 0),
2262 plus_constant (XEXP (addr, 1), offset));
2263 else
2264 addr = plus_constant (addr, offset);
2267 new = change_address_1 (memref, mode, addr, validate);
2269 /* Compute the new values of the memory attributes due to this adjustment.
2270 We add the offsets and update the alignment. */
2271 if (memoffset)
2272 memoffset = GEN_INT (offset + INTVAL (memoffset));
2274 /* Compute the new alignment by taking the MIN of the alignment and the
2275 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
2276 is zero. */
2277 if (offset != 0)
2278 memalign
2279 = MIN (memalign,
2280 (unsigned HOST_WIDE_INT) (offset & -offset) * BITS_PER_UNIT);
2282 /* We can compute the size in a number of ways. */
2283 if (GET_MODE (new) != BLKmode)
2284 size = GEN_INT (GET_MODE_SIZE (GET_MODE (new)));
2285 else if (MEM_SIZE (memref))
2286 size = plus_constant (MEM_SIZE (memref), -offset);
2288 MEM_ATTRS (new) = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref),
2289 memoffset, size, memalign, GET_MODE (new));
2291 /* At some point, we should validate that this offset is within the object,
2292 if all the appropriate values are known. */
2293 return new;
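/* Callers normally reach adjust_address_1 through the expr.h wrappers
   adjust_address (VALIDATE and ADJUST nonzero) and adjust_address_nv
   (VALIDATE zero).  A sketch that splits a DImode MEM into its two
   SImode words, assuming 4-byte words; which word is the low part
   depends on the target's endianness.  */

#if 0	/* Example only.  */
     rtx word0 = adjust_address (dimem, SImode, 0);
     rtx word1 = adjust_address (dimem, SImode, 4);
#endif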
2296 /* Return a memory reference like MEMREF, but with its mode changed
2297 to MODE and its address changed to ADDR, which is assumed to be
2298 MEMREF offset by OFFSET bytes. If VALIDATE is
2299 nonzero, the memory address is forced to be valid. */
2302 adjust_automodify_address_1 (memref, mode, addr, offset, validate)
2303 rtx memref;
2304 enum machine_mode mode;
2305 rtx addr;
2306 HOST_WIDE_INT offset;
2307 int validate;
2309 memref = change_address_1 (memref, VOIDmode, addr, validate);
2310 return adjust_address_1 (memref, mode, offset, validate, 0);
2313 /* Return a memory reference like MEMREF, but whose address is changed by
2314 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2315 known to be in OFFSET (possibly 1). */
2318 offset_address (memref, offset, pow2)
2319 rtx memref;
2320 rtx offset;
2321 unsigned HOST_WIDE_INT pow2;
2323 rtx new, addr = XEXP (memref, 0);
2325 new = simplify_gen_binary (PLUS, Pmode, addr, offset);
2327 /* At this point we don't know _why_ the address is invalid. It
2328 could have secondary memory references, multiplies, or anything.
2330 However, if we did go and rearrange things, we can wind up not
2331 being able to recognize the magic around pic_offset_table_rtx.
2332 This stuff is fragile, and is yet another example of why it is
2333 bad to expose PIC machinery too early. */
2334 if (! memory_address_p (GET_MODE (memref), new)
2335 && GET_CODE (addr) == PLUS
2336 && XEXP (addr, 0) == pic_offset_table_rtx)
2338 addr = force_reg (GET_MODE (addr), addr);
2339 new = simplify_gen_binary (PLUS, Pmode, addr, offset);
2342 update_temp_slot_address (XEXP (memref, 0), new);
2343 new = change_address_1 (memref, VOIDmode, new, 1);
2345 /* Update the alignment to reflect the offset. Reset the offset, which
2346 we don't know. */
2347 MEM_ATTRS (new)
2348 = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref), 0, 0,
2349 MIN (MEM_ALIGN (memref), pow2 * BITS_PER_UNIT),
2350 GET_MODE (new));
2351 return new;
2354 /* Return a memory reference like MEMREF, but with its address changed to
2355 ADDR. The caller is asserting that the actual piece of memory pointed
2356 to is the same, just the form of the address is being changed, such as
2357 by putting something into a register. */
2360 replace_equiv_address (memref, addr)
2361 rtx memref;
2362 rtx addr;
2364 /* change_address_1 copies the memory attribute structure without change
2365 and that's exactly what we want here. */
2366 update_temp_slot_address (XEXP (memref, 0), addr);
2367 return change_address_1 (memref, VOIDmode, addr, 1);
2370 /* Likewise, but the reference is not required to be valid. */
2373 replace_equiv_address_nv (memref, addr)
2374 rtx memref;
2375 rtx addr;
2377 return change_address_1 (memref, VOIDmode, addr, 0);
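/* A typical use of these routines, sketched: force an address that
   failed validation into a register without losing the MEM's
   attributes.  */

#if 0	/* Example only.  */
     if (! memory_address_p (GET_MODE (mem), XEXP (mem, 0)))
       mem = replace_equiv_address (mem, force_reg (Pmode, XEXP (mem, 0)));
#endif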
2380 /* Return a memory reference like MEMREF, but with its mode widened to
2381 MODE and offset by OFFSET. This would be used by targets that e.g.
2382 cannot issue QImode memory operations and have to use SImode memory
2383 operations plus masking logic. */
2386 widen_memory_access (memref, mode, offset)
2387 rtx memref;
2388 enum machine_mode mode;
2389 HOST_WIDE_INT offset;
2391 rtx new = adjust_address_1 (memref, mode, offset, 1, 1);
2392 tree expr = MEM_EXPR (new);
2393 rtx memoffset = MEM_OFFSET (new);
2394 unsigned int size = GET_MODE_SIZE (mode);
2396 /* If we don't know what offset we were at within the expression, then
2397 we can't know if we've overstepped the bounds. */
2398 if (! memoffset)
2399 expr = NULL_TREE;
2401 while (expr)
2403 if (TREE_CODE (expr) == COMPONENT_REF)
2405 tree field = TREE_OPERAND (expr, 1);
2407 if (! DECL_SIZE_UNIT (field))
2409 expr = NULL_TREE;
2410 break;
2413 /* Is the field at least as large as the access? If so, ok,
2414 otherwise strip back to the containing structure. */
2415 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2416 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
2417 && INTVAL (memoffset) >= 0)
2418 break;
2420 if (! host_integerp (DECL_FIELD_OFFSET (field), 1))
2422 expr = NULL_TREE;
2423 break;
2426 expr = TREE_OPERAND (expr, 0);
2427 memoffset = (GEN_INT (INTVAL (memoffset)
2428 + tree_low_cst (DECL_FIELD_OFFSET (field), 1)
2429 + (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
2430 / BITS_PER_UNIT)));
2432 /* Similarly for the decl. */
2433 else if (DECL_P (expr)
2434 && DECL_SIZE_UNIT (expr)
2435 && TREE_CODE (DECL_SIZE_UNIT (expr)) == INTEGER_CST
2436 && compare_tree_int (DECL_SIZE_UNIT (expr), size) >= 0
2437 && (! memoffset || INTVAL (memoffset) >= 0))
2438 break;
2439 else
2441 /* The widened memory access overflows the expression, which means
2442 that it could alias another expression. Zap it. */
2443 expr = NULL_TREE;
2444 break;
2448 if (! expr)
2449 memoffset = NULL_RTX;
2451 /* The widened memory may alias other stuff, so zap the alias set. */
2452 /* ??? Maybe use get_alias_set on any remaining expression. */
2454 MEM_ATTRS (new) = get_mem_attrs (0, expr, memoffset, GEN_INT (size),
2455 MEM_ALIGN (new), mode);
2457 return new;
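/* For instance, a target that can only issue SImode loads might widen
   a QImode reference in place; an illustrative sketch (the masking
   itself is the caller's job):  */

#if 0	/* Example only.  */
     rtx wide = widen_memory_access (byte_mem, SImode, 0);
#endif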
2460 /* Return a newly created CODE_LABEL rtx with a unique label number. */
2463 gen_label_rtx ()
2465 return gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX, NULL_RTX,
2466 NULL, label_num++, NULL);
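/* A label becomes part of the insn stream only when emitted; the usual
   pattern, sketched:  */

#if 0	/* Example only.  */
     rtx label = gen_label_rtx ();

     emit_jump (label);
     /* ... insns that are jumped over ... */
     emit_label (label);
#endif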
2469 /* For procedure integration. */
2471 /* Install new pointers to the first and last insns in the chain.
2472 Also, set cur_insn_uid to one higher than the last in use.
2473 Used for an inline-procedure after copying the insn chain. */
2475 void
2476 set_new_first_and_last_insn (first, last)
2477 rtx first, last;
2479 rtx insn;
2481 first_insn = first;
2482 last_insn = last;
2483 cur_insn_uid = 0;
2485 for (insn = first; insn; insn = NEXT_INSN (insn))
2486 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2488 cur_insn_uid++;
2491 /* Set the range of label numbers found in the current function.
2492 This is used when belatedly compiling an inline function. */
2494 void
2495 set_new_first_and_last_label_num (first, last)
2496 int first, last;
2498 base_label_num = label_num;
2499 first_label_num = first;
2500 last_label_num = last;
2503 /* Set the last label number found in the current function.
2504 This is used when belatedly compiling an inline function. */
2506 void
2507 set_new_last_label_num (last)
2508 int last;
2510 base_label_num = label_num;
2511 last_label_num = last;
2514 /* Restore all variables describing the current status from the structure *P.
2515 This is used after a nested function. */
2517 void
2518 restore_emit_status (p)
2519 struct function *p ATTRIBUTE_UNUSED;
2521 last_label_num = 0;
2524 /* Go through all the RTL insn bodies and copy any invalid shared
2525 structure. This routine should only be called once. */
2527 void
2528 unshare_all_rtl (fndecl, insn)
2529 tree fndecl;
2530 rtx insn;
2532 tree decl;
2534 /* Make sure that virtual parameters are not shared. */
2535 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
2536 SET_DECL_RTL (decl, copy_rtx_if_shared (DECL_RTL (decl)));
2538 /* Make sure that virtual stack slots are not shared. */
2539 unshare_all_decls (DECL_INITIAL (fndecl));
2541 /* Unshare just about everything else. */
2542 unshare_all_rtl_1 (insn);
2544 /* Make sure the addresses of stack slots found outside the insn chain
2545 (such as, in DECL_RTL of a variable) are not shared
2546 with the insn chain.
2548 This special care is necessary when the stack slot MEM does not
2549 actually appear in the insn chain. If it does appear, its address
2550 is unshared from all else at that point. */
2551 stack_slot_list = copy_rtx_if_shared (stack_slot_list);
2554 /* Go through all the RTL insn bodies and copy any invalid shared
2555 structure, again. This is a fairly expensive thing to do so it
2556 should be done sparingly. */
2558 void
2559 unshare_all_rtl_again (insn)
2560 rtx insn;
2562 rtx p;
2563 tree decl;
2565 for (p = insn; p; p = NEXT_INSN (p))
2566 if (INSN_P (p))
2568 reset_used_flags (PATTERN (p));
2569 reset_used_flags (REG_NOTES (p));
2570 reset_used_flags (LOG_LINKS (p));
2573 /* Make sure that virtual stack slots are not shared. */
2574 reset_used_decls (DECL_INITIAL (cfun->decl));
2576 /* Make sure that virtual parameters are not shared. */
2577 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = TREE_CHAIN (decl))
2578 reset_used_flags (DECL_RTL (decl));
2580 reset_used_flags (stack_slot_list);
2582 unshare_all_rtl (cfun->decl, insn);
2585 /* Go through all the RTL insn bodies and copy any invalid shared structure.
2586 Assumes the mark bits are cleared at entry. */
2588 static void
2589 unshare_all_rtl_1 (insn)
2590 rtx insn;
2592 for (; insn; insn = NEXT_INSN (insn))
2593 if (INSN_P (insn))
2595 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2596 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2597 LOG_LINKS (insn) = copy_rtx_if_shared (LOG_LINKS (insn));
2601 /* Go through all virtual stack slots of a function and copy any
2602 shared structure. */
2603 static void
2604 unshare_all_decls (blk)
2605 tree blk;
2607 tree t;
2609 /* Copy shared decls. */
2610 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
2611 if (DECL_RTL_SET_P (t))
2612 SET_DECL_RTL (t, copy_rtx_if_shared (DECL_RTL (t)));
2614 /* Now process sub-blocks. */
2615 for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
2616 unshare_all_decls (t);
2619 /* Go through all virtual stack slots of a function and mark them as
2620 not shared. */
2621 static void
2622 reset_used_decls (blk)
2623 tree blk;
2625 tree t;
2627 /* Mark decls. */
2628 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
2629 if (DECL_RTL_SET_P (t))
2630 reset_used_flags (DECL_RTL (t));
2632 /* Now process sub-blocks. */
2633 for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
2634 reset_used_decls (t);
2637 /* Similar to `copy_rtx' except that if MAY_SHARE is present, it is
2638 placed in the result directly, rather than being copied. MAY_SHARE is
2639 either a MEM or an EXPR_LIST of MEMs. */
2642 copy_most_rtx (orig, may_share)
2643 rtx orig;
2644 rtx may_share;
2646 rtx copy;
2647 int i, j;
2648 RTX_CODE code;
2649 const char *format_ptr;
2651 if (orig == may_share
2652 || (GET_CODE (may_share) == EXPR_LIST
2653 && in_expr_list_p (may_share, orig)))
2654 return orig;
2656 code = GET_CODE (orig);
2658 switch (code)
2660 case REG:
2661 case QUEUED:
2662 case CONST_INT:
2663 case CONST_DOUBLE:
2664 case CONST_VECTOR:
2665 case SYMBOL_REF:
2666 case CODE_LABEL:
2667 case PC:
2668 case CC0:
2669 return orig;
2670 default:
2671 break;
2674 copy = rtx_alloc (code);
2675 PUT_MODE (copy, GET_MODE (orig));
2676 RTX_FLAG (copy, in_struct) = RTX_FLAG (orig, in_struct);
2677 RTX_FLAG (copy, volatil) = RTX_FLAG (orig, volatil);
2678 RTX_FLAG (copy, unchanging) = RTX_FLAG (orig, unchanging);
2679 RTX_FLAG (copy, integrated) = RTX_FLAG (orig, integrated);
2680 RTX_FLAG (copy, frame_related) = RTX_FLAG (orig, frame_related);
2682 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
2684 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
2686 switch (*format_ptr++)
2688 case 'e':
2689 XEXP (copy, i) = XEXP (orig, i);
2690 if (XEXP (orig, i) != NULL && XEXP (orig, i) != may_share)
2691 XEXP (copy, i) = copy_most_rtx (XEXP (orig, i), may_share);
2692 break;
2694 case 'u':
2695 XEXP (copy, i) = XEXP (orig, i);
2696 break;
2698 case 'E':
2699 case 'V':
2700 XVEC (copy, i) = XVEC (orig, i);
2701 if (XVEC (orig, i) != NULL)
2703 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
2704 for (j = 0; j < XVECLEN (copy, i); j++)
2705 XVECEXP (copy, i, j)
2706 = copy_most_rtx (XVECEXP (orig, i, j), may_share);
2708 break;
2710 case 'w':
2711 XWINT (copy, i) = XWINT (orig, i);
2712 break;
2714 case 'n':
2715 case 'i':
2716 XINT (copy, i) = XINT (orig, i);
2717 break;
2719 case 't':
2720 XTREE (copy, i) = XTREE (orig, i);
2721 break;
2723 case 's':
2724 case 'S':
2725 XSTR (copy, i) = XSTR (orig, i);
2726 break;
2728 case '0':
2729 /* Copy this through the wide int field; that's safest. */
2730 X0WINT (copy, i) = X0WINT (orig, i);
2731 break;
2733 default:
2734 abort ();
2737 return copy;
2740 /* Mark ORIG as in use, and return a copy of it if it was already in use.
2741 Recursively does the same for subexpressions. */
2744 copy_rtx_if_shared (orig)
2745 rtx orig;
2747 rtx x = orig;
2748 int i;
2749 enum rtx_code code;
2750 const char *format_ptr;
2751 int copied = 0;
2753 if (x == 0)
2754 return 0;
2756 code = GET_CODE (x);
2758 /* These types may be freely shared. */
2760 switch (code)
2762 case REG:
2763 case QUEUED:
2764 case CONST_INT:
2765 case CONST_DOUBLE:
2766 case CONST_VECTOR:
2767 case SYMBOL_REF:
2768 case CODE_LABEL:
2769 case PC:
2770 case CC0:
2771 case SCRATCH:
2772 /* SCRATCH must be shared because each one represents a distinct value. */
2773 return x;
2775 case CONST:
2776 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
2777 a LABEL_REF, it isn't sharable. */
2778 if (GET_CODE (XEXP (x, 0)) == PLUS
2779 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
2780 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
2781 return x;
2782 break;
2784 case INSN:
2785 case JUMP_INSN:
2786 case CALL_INSN:
2787 case NOTE:
2788 case BARRIER:
2789 /* The chain of insns is not being copied. */
2790 return x;
2792 case MEM:
2793 /* A MEM is allowed to be shared if its address is constant.
2795 We used to allow sharing of MEMs which referenced
2796 virtual_stack_vars_rtx or virtual_incoming_args_rtx, but
2797 that can lose. instantiate_virtual_regs will not unshare
2798 the MEMs, and combine may change the structure of the address
2799 because it looks safe and profitable in one context, but
2800 in some other context it creates unrecognizable RTL. */
2801 if (CONSTANT_ADDRESS_P (XEXP (x, 0)))
2802 return x;
2804 break;
2806 default:
2807 break;
2810 /* This rtx may not be shared. If it has already been seen,
2811 replace it with a copy of itself. */
2813 if (RTX_FLAG (x, used))
2815 rtx copy;
2817 copy = rtx_alloc (code);
2818 memcpy (copy, x,
2819 (sizeof (*copy) - sizeof (copy->fld)
2820 + sizeof (copy->fld[0]) * GET_RTX_LENGTH (code)));
2821 x = copy;
2822 copied = 1;
2824 RTX_FLAG (x, used) = 1;
2826 /* Now scan the subexpressions recursively.
2827 We can store any replaced subexpressions directly into X
2828 since we know X is not shared! Any vectors in X
2829 must be copied if X was copied. */
2831 format_ptr = GET_RTX_FORMAT (code);
2833 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2835 switch (*format_ptr++)
2837 case 'e':
2838 XEXP (x, i) = copy_rtx_if_shared (XEXP (x, i));
2839 break;
2841 case 'E':
2842 if (XVEC (x, i) != NULL)
2844 int j;
2845 int len = XVECLEN (x, i);
2847 if (copied && len > 0)
2848 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
2849 for (j = 0; j < len; j++)
2850 XVECEXP (x, i, j) = copy_rtx_if_shared (XVECEXP (x, i, j));
2852 break;
2855 return x;
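/* copy_rtx_if_shared depends on the `used' flags being clear on entry,
   so the usual idiom pairs it with reset_used_flags, as in
   unshare_all_rtl_again above.  A one-insn sketch:  */

#if 0	/* Example only.  */
     reset_used_flags (PATTERN (insn));
     PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
#endif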
2858 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
2859 to look for shared sub-parts. */
2861 void
2862 reset_used_flags (x)
2863 rtx x;
2865 int i, j;
2866 enum rtx_code code;
2867 const char *format_ptr;
2869 if (x == 0)
2870 return;
2872 code = GET_CODE (x);
2874 /* These types may be freely shared so we needn't do any resetting
2875 for them. */
2877 switch (code)
2879 case REG:
2880 case QUEUED:
2881 case CONST_INT:
2882 case CONST_DOUBLE:
2883 case CONST_VECTOR:
2884 case SYMBOL_REF:
2885 case CODE_LABEL:
2886 case PC:
2887 case CC0:
2888 return;
2890 case INSN:
2891 case JUMP_INSN:
2892 case CALL_INSN:
2893 case NOTE:
2894 case LABEL_REF:
2895 case BARRIER:
2896 /* The chain of insns is not being copied. */
2897 return;
2899 default:
2900 break;
2903 RTX_FLAG (x, used) = 0;
2905 format_ptr = GET_RTX_FORMAT (code);
2906 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2908 switch (*format_ptr++)
2910 case 'e':
2911 reset_used_flags (XEXP (x, i));
2912 break;
2914 case 'E':
2915 for (j = 0; j < XVECLEN (x, i); j++)
2916 reset_used_flags (XVECEXP (x, i, j));
2917 break;
2922 /* Copy X if necessary so that it won't be altered by changes in OTHER.
2923 Return X or the rtx for the pseudo reg the value of X was copied into.
2924 OTHER must be valid as a SET_DEST. */
2927 make_safe_from (x, other)
2928 rtx x, other;
2930 while (1)
2931 switch (GET_CODE (other))
2933 case SUBREG:
2934 other = SUBREG_REG (other);
2935 break;
2936 case STRICT_LOW_PART:
2937 case SIGN_EXTEND:
2938 case ZERO_EXTEND:
2939 other = XEXP (other, 0);
2940 break;
2941 default:
2942 goto done;
2944 done:
2945 if ((GET_CODE (other) == MEM
2946 && ! CONSTANT_P (x)
2947 && GET_CODE (x) != REG
2948 && GET_CODE (x) != SUBREG)
2949 || (GET_CODE (other) == REG
2950 && (REGNO (other) < FIRST_PSEUDO_REGISTER
2951 || reg_mentioned_p (other, x))))
2953 rtx temp = gen_reg_rtx (GET_MODE (x));
2954 emit_move_insn (temp, x);
2955 return temp;
2957 return x;
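/* A sketch of the typical call: protect a source value from the store
   that is about to be emitted.  */

#if 0	/* Example only.  */
     x = make_safe_from (x, target);
     emit_move_insn (target, x);
#endif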
2960 /* Emission of insns (adding them to the doubly-linked list). */
2962 /* Return the first insn of the current sequence or current function. */
2965 get_insns ()
2967 return first_insn;
2970 /* Specify a new insn as the first in the chain. */
2972 void
2973 set_first_insn (insn)
2974 rtx insn;
2976 if (PREV_INSN (insn) != 0)
2977 abort ();
2978 first_insn = insn;
2981 /* Return the last insn emitted in current sequence or current function. */
2984 get_last_insn ()
2986 return last_insn;
2989 /* Specify a new insn as the last in the chain. */
2991 void
2992 set_last_insn (insn)
2993 rtx insn;
2995 if (NEXT_INSN (insn) != 0)
2996 abort ();
2997 last_insn = insn;
3000 /* Return the last insn emitted, even if it is in a sequence now pushed. */
3003 get_last_insn_anywhere ()
3005 struct sequence_stack *stack;
3006 if (last_insn)
3007 return last_insn;
3008 for (stack = seq_stack; stack; stack = stack->next)
3009 if (stack->last != 0)
3010 return stack->last;
3011 return 0;
3014 /* Return the first nonnote insn emitted in current sequence or current
3015 function. This routine looks inside SEQUENCEs. */
3018 get_first_nonnote_insn ()
3020 rtx insn = first_insn;
3022 while (insn)
3024 insn = next_insn (insn);
3025 if (insn == 0 || GET_CODE (insn) != NOTE)
3026 break;
3029 return insn;
3032 /* Return the last nonnote insn emitted in current sequence or current
3033 function. This routine looks inside SEQUENCEs. */
3036 get_last_nonnote_insn ()
3038 rtx insn = last_insn;
3040 while (insn)
3042 insn = previous_insn (insn);
3043 if (insn == 0 || GET_CODE (insn) != NOTE)
3044 break;
3047 return insn;
3050 /* Return a number larger than any instruction's uid in this function. */
3053 get_max_uid ()
3055 return cur_insn_uid;
3058 /* Renumber instructions so that no instruction UIDs are wasted. */
3060 void
3061 renumber_insns (stream)
3062 FILE *stream;
3064 rtx insn;
3066 /* If we're not supposed to renumber instructions, don't. */
3067 if (!flag_renumber_insns)
3068 return;
3070 /* If there aren't that many instructions, then it's not really
3071 worth renumbering them. */
3072 if (flag_renumber_insns == 1 && get_max_uid () < 25000)
3073 return;
3075 cur_insn_uid = 1;
3077 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3079 if (stream)
3080 fprintf (stream, "Renumbering insn %d to %d\n",
3081 INSN_UID (insn), cur_insn_uid);
3082 INSN_UID (insn) = cur_insn_uid++;
3086 /* Return the next insn. If it is a SEQUENCE, return the first insn
3087 of the sequence. */
3090 next_insn (insn)
3091 rtx insn;
3093 if (insn)
3095 insn = NEXT_INSN (insn);
3096 if (insn && GET_CODE (insn) == INSN
3097 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3098 insn = XVECEXP (PATTERN (insn), 0, 0);
3101 return insn;
3104 /* Return the previous insn. If it is a SEQUENCE, return the last insn
3105 of the sequence. */
3108 previous_insn (insn)
3109 rtx insn;
3111 if (insn)
3113 insn = PREV_INSN (insn);
3114 if (insn && GET_CODE (insn) == INSN
3115 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3116 insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
3119 return insn;
3122 /* Return the next insn after INSN that is not a NOTE. This routine does not
3123 look inside SEQUENCEs. */
3126 next_nonnote_insn (insn)
3127 rtx insn;
3129 while (insn)
3131 insn = NEXT_INSN (insn);
3132 if (insn == 0 || GET_CODE (insn) != NOTE)
3133 break;
3136 return insn;
3139 /* Return the previous insn before INSN that is not a NOTE. This routine does
3140 not look inside SEQUENCEs. */
3143 prev_nonnote_insn (insn)
3144 rtx insn;
3146 while (insn)
3148 insn = PREV_INSN (insn);
3149 if (insn == 0 || GET_CODE (insn) != NOTE)
3150 break;
3153 return insn;
3156 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3157 or 0, if there is none. This routine does not look inside
3158 SEQUENCEs. */
3161 next_real_insn (insn)
3162 rtx insn;
3164 while (insn)
3166 insn = NEXT_INSN (insn);
3167 if (insn == 0 || GET_CODE (insn) == INSN
3168 || GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == JUMP_INSN)
3169 break;
3172 return insn;
3175 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3176 or 0, if there is none. This routine does not look inside
3177 SEQUENCEs. */
3180 prev_real_insn (insn)
3181 rtx insn;
3183 while (insn)
3185 insn = PREV_INSN (insn);
3186 if (insn == 0 || GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
3187 || GET_CODE (insn) == JUMP_INSN)
3188 break;
3191 return insn;
3194 /* Find the next insn after INSN that really does something. This routine
3195 does not look inside SEQUENCEs. Until reload has completed, this is the
3196 same as next_real_insn. */
3199 active_insn_p (insn)
3200 rtx insn;
3202 return (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == JUMP_INSN
3203 || (GET_CODE (insn) == INSN
3204 && (! reload_completed
3205 || (GET_CODE (PATTERN (insn)) != USE
3206 && GET_CODE (PATTERN (insn)) != CLOBBER))));
3210 next_active_insn (insn)
3211 rtx insn;
3213 while (insn)
3215 insn = NEXT_INSN (insn);
3216 if (insn == 0 || active_insn_p (insn))
3217 break;
3220 return insn;
3223 /* Find the last insn before INSN that really does something. This routine
3224 does not look inside SEQUENCEs. Until reload has completed, this is the
3225 same as prev_real_insn. */
3228 prev_active_insn (insn)
3229 rtx insn;
3231 while (insn)
3233 insn = PREV_INSN (insn);
3234 if (insn == 0 || active_insn_p (insn))
3235 break;
3238 return insn;
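/* These walkers compose into the usual scan over the insns that will
   actually generate code; an illustrative sketch:  */

#if 0	/* Example only.  */
     rtx insn;

     for (insn = get_insns (); insn; insn = next_active_insn (insn))
       if (active_insn_p (insn))
	 {
	   /* ... process INSN ... */
	 }
#endif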
3241 /* Return the next CODE_LABEL after the insn INSN, or 0 if there is none. */
3244 next_label (insn)
3245 rtx insn;
3247 while (insn)
3249 insn = NEXT_INSN (insn);
3250 if (insn == 0 || GET_CODE (insn) == CODE_LABEL)
3251 break;
3254 return insn;
3257 /* Return the last CODE_LABEL before the insn INSN, or 0 if there is none. */
3260 prev_label (insn)
3261 rtx insn;
3263 while (insn)
3265 insn = PREV_INSN (insn);
3266 if (insn == 0 || GET_CODE (insn) == CODE_LABEL)
3267 break;
3270 return insn;
3273 #ifdef HAVE_cc0
3274 /* INSN uses CC0 and is being moved into a delay slot. Set up REG_CC_SETTER
3275 and REG_CC_USER notes so we can find it. */
3277 void
3278 link_cc0_insns (insn)
3279 rtx insn;
3281 rtx user = next_nonnote_insn (insn);
3283 if (GET_CODE (user) == INSN && GET_CODE (PATTERN (user)) == SEQUENCE)
3284 user = XVECEXP (PATTERN (user), 0, 0);
3286 REG_NOTES (user) = gen_rtx_INSN_LIST (REG_CC_SETTER, insn,
3287 REG_NOTES (user));
3288 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_CC_USER, user, REG_NOTES (insn));
3291 /* Return the next insn that uses CC0 after INSN, which is assumed to
3292 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3293 applied to the result of this function should yield INSN).
3295 Normally, this is simply the next insn. However, if a REG_CC_USER note
3296 is present, it contains the insn that uses CC0.
3298 Return 0 if we can't find the insn. */
3301 next_cc0_user (insn)
3302 rtx insn;
3304 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3306 if (note)
3307 return XEXP (note, 0);
3309 insn = next_nonnote_insn (insn);
3310 if (insn && GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
3311 insn = XVECEXP (PATTERN (insn), 0, 0);
3313 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3314 return insn;
3316 return 0;
3319 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3320 note, it is the previous insn. */
3323 prev_cc0_setter (insn)
3324 rtx insn;
3326 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3328 if (note)
3329 return XEXP (note, 0);
3331 insn = prev_nonnote_insn (insn);
3332 if (! sets_cc0_p (PATTERN (insn)))
3333 abort ();
3335 return insn;
3337 #endif
3339 /* Increment the label uses for all labels present in rtx. */
3341 static void
3342 mark_label_nuses (x)
3343 rtx x;
3345 enum rtx_code code;
3346 int i, j;
3347 const char *fmt;
3349 code = GET_CODE (x);
3350 if (code == LABEL_REF)
3351 LABEL_NUSES (XEXP (x, 0))++;
3353 fmt = GET_RTX_FORMAT (code);
3354 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3356 if (fmt[i] == 'e')
3357 mark_label_nuses (XEXP (x, i));
3358 else if (fmt[i] == 'E')
3359 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3360 mark_label_nuses (XVECEXP (x, i, j));
3365 /* Try splitting insns that can be split for better scheduling.
3366 PAT is the pattern which might split.
3367 TRIAL is the insn providing PAT.
3368 LAST is nonzero if we should return the last insn of the sequence produced.
3370 If this routine succeeds in splitting, it returns the first or last
3371 replacement insn depending on the value of LAST. Otherwise, it
3372 returns TRIAL. If the insn to be returned can be split, it will be. */
3375 try_split (pat, trial, last)
3376 rtx pat, trial;
3377 int last;
3379 rtx before = PREV_INSN (trial);
3380 rtx after = NEXT_INSN (trial);
3381 int has_barrier = 0;
3382 rtx tem;
3383 rtx note, seq;
3384 int probability;
3385 rtx insn_last, insn;
3386 int njumps = 0;
3388 if (any_condjump_p (trial)
3389 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3390 split_branch_probability = INTVAL (XEXP (note, 0));
3391 probability = split_branch_probability;
3393 seq = split_insns (pat, trial);
3395 split_branch_probability = -1;
3397 /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
3398 We may need to handle this specially. */
3399 if (after && GET_CODE (after) == BARRIER)
3401 has_barrier = 1;
3402 after = NEXT_INSN (after);
3405 if (!seq)
3406 return trial;
3408 /* Avoid infinite loop if any insn of the result matches
3409 the original pattern. */
3410 insn_last = seq;
3411 while (1)
3413 if (INSN_P (insn_last)
3414 && rtx_equal_p (PATTERN (insn_last), pat))
3415 return trial;
3416 if (!NEXT_INSN (insn_last))
3417 break;
3418 insn_last = NEXT_INSN (insn_last);
3421 /* Mark labels. */
3422 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3424 if (GET_CODE (insn) == JUMP_INSN)
3426 mark_jump_label (PATTERN (insn), insn, 0);
3427 njumps++;
3428 if (probability != -1
3429 && any_condjump_p (insn)
3430 && !find_reg_note (insn, REG_BR_PROB, 0))
3432 /* We can preserve the REG_BR_PROB notes only if exactly
3433 one jump is created, otherwise the machine description
3434 is responsible for this step using
3435 the split_branch_probability variable. */
3436 if (njumps != 1)
3437 abort ();
3438 REG_NOTES (insn)
3439 = gen_rtx_EXPR_LIST (REG_BR_PROB,
3440 GEN_INT (probability),
3441 REG_NOTES (insn));
3446 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3447 in SEQ and copy our CALL_INSN_FUNCTION_USAGE to it. */
3448 if (GET_CODE (trial) == CALL_INSN)
3450 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3451 if (GET_CODE (insn) == CALL_INSN)
3453 CALL_INSN_FUNCTION_USAGE (insn)
3454 = CALL_INSN_FUNCTION_USAGE (trial);
3455 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
3459 /* Copy notes, particularly those related to the CFG. */
3460 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3462 switch (REG_NOTE_KIND (note))
3464 case REG_EH_REGION:
3465 insn = insn_last;
3466 while (insn != NULL_RTX)
3468 if (GET_CODE (insn) == CALL_INSN
3469 || (flag_non_call_exceptions
3470 && may_trap_p (PATTERN (insn))))
3471 REG_NOTES (insn)
3472 = gen_rtx_EXPR_LIST (REG_EH_REGION,
3473 XEXP (note, 0),
3474 REG_NOTES (insn));
3475 insn = PREV_INSN (insn);
3477 break;
3479 case REG_NORETURN:
3480 case REG_SETJMP:
3481 case REG_ALWAYS_RETURN:
3482 insn = insn_last;
3483 while (insn != NULL_RTX)
3485 if (GET_CODE (insn) == CALL_INSN)
3486 REG_NOTES (insn)
3487 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3488 XEXP (note, 0),
3489 REG_NOTES (insn));
3490 insn = PREV_INSN (insn);
3492 break;
3494 case REG_NON_LOCAL_GOTO:
3495 insn = insn_last;
3496 while (insn != NULL_RTX)
3498 if (GET_CODE (insn) == JUMP_INSN)
3499 REG_NOTES (insn)
3500 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3501 XEXP (note, 0),
3502 REG_NOTES (insn));
3503 insn = PREV_INSN (insn);
3505 break;
3507 default:
3508 break;
3512 /* If there are LABELS inside the split insns, increment the
3513 usage count so we don't delete the label. */
3514 if (GET_CODE (trial) == INSN)
3516 insn = insn_last;
3517 while (insn != NULL_RTX)
3519 if (GET_CODE (insn) == INSN)
3520 mark_label_nuses (PATTERN (insn));
3522 insn = PREV_INSN (insn);
3526 tem = emit_insn_after_scope (seq, trial, INSN_SCOPE (trial));
3528 delete_insn (trial);
3529 if (has_barrier)
3530 emit_barrier_after (tem);
3532 /* Recursively call try_split for each new insn created; by the
3533 time control returns here that insn will be fully split, so
3534 set LAST and continue from the insn after the one returned.
3535 We can't use next_active_insn here since AFTER may be a note.
3536 Ignore deleted insns, which can occur if not optimizing. */
3537 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3538 if (! INSN_DELETED_P (tem) && INSN_P (tem))
3539 tem = try_split (PATTERN (tem), tem, 1);
3541 /* Return either the first or the last insn, depending on which was
3542 requested. */
3543 return last
3544 ? (after ? PREV_INSN (after) : last_insn)
3545 : NEXT_INSN (before);
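/* A splitting pass might drive try_split as sketched below; when no
   define_split in the machine description matches, TRIAL comes back
   unchanged.  */

#if 0	/* Example only.  */
     rtx last = try_split (PATTERN (insn), insn, 1);

     if (last != insn)
       {
	 /* INSN was replaced by a fully split sequence ending at LAST.  */
       }
#endif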
3548 /* Make and return an INSN rtx, initializing all its slots.
3549 Store PATTERN in the pattern slots. */
3552 make_insn_raw (pattern)
3553 rtx pattern;
3555 rtx insn;
3557 insn = rtx_alloc (INSN);
3559 INSN_UID (insn) = cur_insn_uid++;
3560 PATTERN (insn) = pattern;
3561 INSN_CODE (insn) = -1;
3562 LOG_LINKS (insn) = NULL;
3563 REG_NOTES (insn) = NULL;
3564 INSN_SCOPE (insn) = NULL;
3565 BLOCK_FOR_INSN (insn) = NULL;
3567 #ifdef ENABLE_RTL_CHECKING
3568 if (insn
3569 && INSN_P (insn)
3570 && (returnjump_p (insn)
3571 || (GET_CODE (insn) == SET
3572 && SET_DEST (insn) == pc_rtx)))
3574 warning ("ICE: emit_insn used where emit_jump_insn needed:\n");
3575 debug_rtx (insn);
3577 #endif
3579 return insn;
3582 /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
3584 static rtx
3585 make_jump_insn_raw (pattern)
3586 rtx pattern;
3588 rtx insn;
3590 insn = rtx_alloc (JUMP_INSN);
3591 INSN_UID (insn) = cur_insn_uid++;
3593 PATTERN (insn) = pattern;
3594 INSN_CODE (insn) = -1;
3595 LOG_LINKS (insn) = NULL;
3596 REG_NOTES (insn) = NULL;
3597 JUMP_LABEL (insn) = NULL;
3598 INSN_SCOPE (insn) = NULL;
3599 BLOCK_FOR_INSN (insn) = NULL;
3601 return insn;
3604 /* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
3606 static rtx
3607 make_call_insn_raw (pattern)
3608 rtx pattern;
3610 rtx insn;
3612 insn = rtx_alloc (CALL_INSN);
3613 INSN_UID (insn) = cur_insn_uid++;
3615 PATTERN (insn) = pattern;
3616 INSN_CODE (insn) = -1;
3617 LOG_LINKS (insn) = NULL;
3618 REG_NOTES (insn) = NULL;
3619 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
3620 INSN_SCOPE (insn) = NULL;
3621 BLOCK_FOR_INSN (insn) = NULL;
3623 return insn;
3626 /* Add INSN to the end of the doubly-linked list.
3627 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
3629 void
3630 add_insn (insn)
3631 rtx insn;
3633 PREV_INSN (insn) = last_insn;
3634 NEXT_INSN (insn) = 0;
3636 if (NULL != last_insn)
3637 NEXT_INSN (last_insn) = insn;
3639 if (NULL == first_insn)
3640 first_insn = insn;
3642 last_insn = insn;
3645 /* Add INSN into the doubly-linked list after insn AFTER. This and
3646 the next should be the only functions called to insert an insn once
3647 delay slots have been filled since only they know how to update a
3648 SEQUENCE. */
3650 void
3651 add_insn_after (insn, after)
3652 rtx insn, after;
3654 rtx next = NEXT_INSN (after);
3655 basic_block bb;
3657 if (optimize && INSN_DELETED_P (after))
3658 abort ();
3660 NEXT_INSN (insn) = next;
3661 PREV_INSN (insn) = after;
3663 if (next)
3665 PREV_INSN (next) = insn;
3666 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
3667 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
3669 else if (last_insn == after)
3670 last_insn = insn;
3671 else
3673 struct sequence_stack *stack = seq_stack;
3674 /* Scan all pending sequences too. */
3675 for (; stack; stack = stack->next)
3676 if (after == stack->last)
3678 stack->last = insn;
3679 break;
3682 if (stack == 0)
3683 abort ();
3686 if (GET_CODE (after) != BARRIER
3687 && GET_CODE (insn) != BARRIER
3688 && (bb = BLOCK_FOR_INSN (after)))
3690 set_block_for_insn (insn, bb);
3691 if (INSN_P (insn))
3692 bb->flags |= BB_DIRTY;
3693 /* This should not happen, as the first insn in the BB is always
3694 either a NOTE or a LABEL. */
3695 if (bb->end == after
3696 /* Avoid clobbering of structure when creating new BB. */
3697 && GET_CODE (insn) != BARRIER
3698 && (GET_CODE (insn) != NOTE
3699 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
3700 bb->end = insn;
3703 NEXT_INSN (after) = insn;
3704 if (GET_CODE (after) == INSN && GET_CODE (PATTERN (after)) == SEQUENCE)
3706 rtx sequence = PATTERN (after);
3707 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3711 /* Add INSN into the doubly-linked list before insn BEFORE. This and
3712 the previous should be the only functions called to insert an insn once
3713 delay slots have been filled since only they know how to update a
3714 SEQUENCE. */
3716 void
3717 add_insn_before (insn, before)
3718 rtx insn, before;
3720 rtx prev = PREV_INSN (before);
3721 basic_block bb;
3723 if (optimize && INSN_DELETED_P (before))
3724 abort ();
3726 PREV_INSN (insn) = prev;
3727 NEXT_INSN (insn) = before;
3729 if (prev)
3731 NEXT_INSN (prev) = insn;
3732 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
3734 rtx sequence = PATTERN (prev);
3735 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3738 else if (first_insn == before)
3739 first_insn = insn;
3740 else
3742 struct sequence_stack *stack = seq_stack;
3743 /* Scan all pending sequences too. */
3744 for (; stack; stack = stack->next)
3745 if (before == stack->first)
3747 stack->first = insn;
3748 break;
3751 if (stack == 0)
3752 abort ();
3755 if (GET_CODE (before) != BARRIER
3756 && GET_CODE (insn) != BARRIER
3757 && (bb = BLOCK_FOR_INSN (before)))
3759 set_block_for_insn (insn, bb);
3760 if (INSN_P (insn))
3761 bb->flags |= BB_DIRTY;
3762 /* This should not happen, as the first insn in the BB is always
3763 either a NOTE or a LABEL. */
3764 if (bb->head == insn
3765 /* Avoid clobbering of structure when creating new BB. */
3766 && GET_CODE (insn) != BARRIER
3767 && (GET_CODE (insn) != NOTE
3768 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
3769 abort ();
3772 PREV_INSN (before) = insn;
3773 if (GET_CODE (before) == INSN && GET_CODE (PATTERN (before)) == SEQUENCE)
3774 PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
3777 /* Remove an insn from its doubly-linked list. This function knows how
3778 to handle sequences. */
3779 void
3780 remove_insn (insn)
3781 rtx insn;
3783 rtx next = NEXT_INSN (insn);
3784 rtx prev = PREV_INSN (insn);
3785 basic_block bb;
3787 if (prev)
3789 NEXT_INSN (prev) = next;
3790 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
3792 rtx sequence = PATTERN (prev);
3793 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
3796 else if (first_insn == insn)
3797 first_insn = next;
3798 else
3800 struct sequence_stack *stack = seq_stack;
3801 /* Scan all pending sequences too. */
3802 for (; stack; stack = stack->next)
3803 if (insn == stack->first)
3805 stack->first = next;
3806 break;
3809 if (stack == 0)
3810 abort ();
3813 if (next)
3815 PREV_INSN (next) = prev;
3816 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
3817 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
3819 else if (last_insn == insn)
3820 last_insn = prev;
3821 else
3823 struct sequence_stack *stack = seq_stack;
3824 /* Scan all pending sequences too. */
3825 for (; stack; stack = stack->next)
3826 if (insn == stack->last)
3828 stack->last = prev;
3829 break;
3832 if (stack == 0)
3833 abort ();
3835 if (GET_CODE (insn) != BARRIER
3836 && (bb = BLOCK_FOR_INSN (insn)))
3838 if (INSN_P (insn))
3839 bb->flags |= BB_DIRTY;
3840 if (bb->head == insn)
3842 /* Never ever delete the basic block note without deleting the whole
3843 basic block. */
3844 if (GET_CODE (insn) == NOTE)
3845 abort ();
3846 bb->head = next;
3848 if (bb->end == insn)
3849 bb->end = prev;
3853 /* Delete all insns made since FROM.
3854 FROM becomes the new last instruction. */
3856 void
3857 delete_insns_since (from)
3858 rtx from;
3860 if (from == 0)
3861 first_insn = 0;
3862 else
3863 NEXT_INSN (from) = 0;
3864 last_insn = from;
3867 /* This function is deprecated, please use sequences instead.
3869 Move a consecutive bunch of insns to a different place in the chain.
3870 The insns to be moved are those between FROM and TO.
3871 They are moved to a new position after the insn AFTER.
3872 AFTER must not be FROM or TO or any insn in between.
3874 This function does not know about SEQUENCEs and hence should not be
3875 called after delay-slot filling has been done. */
3877 void
3878 reorder_insns_nobb (from, to, after)
3879 rtx from, to, after;
3881 /* Splice this bunch out of where it is now. */
3882 if (PREV_INSN (from))
3883 NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
3884 if (NEXT_INSN (to))
3885 PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
3886 if (last_insn == to)
3887 last_insn = PREV_INSN (from);
3888 if (first_insn == from)
3889 first_insn = NEXT_INSN (to);
3891 /* Make the new neighbors point to it and it to them. */
3892 if (NEXT_INSN (after))
3893 PREV_INSN (NEXT_INSN (after)) = to;
3895 NEXT_INSN (to) = NEXT_INSN (after);
3896 PREV_INSN (from) = after;
3897 NEXT_INSN (after) = from;
3898 if (after == last_insn)
3899 last_insn = to;
3902 /* Same as function above, but take care to update BB boundaries. */
3903 void
3904 reorder_insns (from, to, after)
3905 rtx from, to, after;
3907 rtx prev = PREV_INSN (from);
3908 basic_block bb, bb2;
3910 reorder_insns_nobb (from, to, after);
3912 if (GET_CODE (after) != BARRIER
3913 && (bb = BLOCK_FOR_INSN (after)))
3915 rtx x;
3916 bb->flags |= BB_DIRTY;
3918 if (GET_CODE (from) != BARRIER
3919 && (bb2 = BLOCK_FOR_INSN (from)))
3921 if (bb2->end == to)
3922 bb2->end = prev;
3923 bb2->flags |= BB_DIRTY;
3926 if (bb->end == after)
3927 bb->end = to;
3929 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
3930 set_block_for_insn (x, bb);
3934 /* Return the line note insn preceding INSN. */
3936 static rtx
3937 find_line_note (insn)
3938 rtx insn;
3940 if (no_line_numbers)
3941 return 0;
3943 for (; insn; insn = PREV_INSN (insn))
3944 if (GET_CODE (insn) == NOTE
3945 && NOTE_LINE_NUMBER (insn) >= 0)
3946 break;
3948 return insn;
3951 /* Like reorder_insns, but inserts line notes to preserve the line numbers
3952 of the moved insns when debugging. This may insert a note between AFTER
3953 and FROM, and another one after TO. */
3955 void
3956 reorder_insns_with_line_notes (from, to, after)
3957 rtx from, to, after;
3959 rtx from_line = find_line_note (from);
3960 rtx after_line = find_line_note (after);
3962 reorder_insns (from, to, after);
3964 if (from_line == after_line)
3965 return;
3967 if (from_line)
3968 emit_line_note_after (NOTE_SOURCE_FILE (from_line),
3969 NOTE_LINE_NUMBER (from_line),
3970 after);
3971 if (after_line)
3972 emit_line_note_after (NOTE_SOURCE_FILE (after_line),
3973 NOTE_LINE_NUMBER (after_line),
3974 to);
3977 /* Remove unnecessary notes from the instruction stream. */
3979 void
3980 remove_unnecessary_notes ()
3982 rtx block_stack = NULL_RTX;
3983 rtx eh_stack = NULL_RTX;
3984 rtx insn;
3985 rtx next;
3986 rtx tmp;
3988 /* We must not remove the first instruction in the function because
3989 the compiler depends on the first instruction being a note. */
3990 for (insn = NEXT_INSN (get_insns ()); insn; insn = next)
3992 /* Remember what's next. */
3993 next = NEXT_INSN (insn);
3995 /* We're only interested in notes. */
3996 if (GET_CODE (insn) != NOTE)
3997 continue;
3999 switch (NOTE_LINE_NUMBER (insn))
4001 case NOTE_INSN_DELETED:
4002 case NOTE_INSN_LOOP_END_TOP_COND:
4003 remove_insn (insn);
4004 break;
4006 case NOTE_INSN_EH_REGION_BEG:
4007 eh_stack = alloc_INSN_LIST (insn, eh_stack);
4008 break;
4010 case NOTE_INSN_EH_REGION_END:
4011 /* Too many end notes. */
4012 if (eh_stack == NULL_RTX)
4013 abort ();
4014 /* Mismatched nesting. */
4015 if (NOTE_EH_HANDLER (XEXP (eh_stack, 0)) != NOTE_EH_HANDLER (insn))
4016 abort ();
4017 tmp = eh_stack;
4018 eh_stack = XEXP (eh_stack, 1);
4019 free_INSN_LIST_node (tmp);
4020 break;
4022 case NOTE_INSN_BLOCK_BEG:
4023 /* By now, all notes indicating lexical blocks should have
4024 NOTE_BLOCK filled in. */
4025 if (NOTE_BLOCK (insn) == NULL_TREE)
4026 abort ();
4027 block_stack = alloc_INSN_LIST (insn, block_stack);
4028 break;
4030 case NOTE_INSN_BLOCK_END:
4031 /* Too many end notes. */
4032 if (block_stack == NULL_RTX)
4033 abort ();
4034 /* Mismatched nesting. */
4035 if (NOTE_BLOCK (XEXP (block_stack, 0)) != NOTE_BLOCK (insn))
4036 abort ();
4037 tmp = block_stack;
4038 block_stack = XEXP (block_stack, 1);
4039 free_INSN_LIST_node (tmp);
4041 /* Scan back to see if there are any non-note instructions
4042 between INSN and the beginning of this block. If not,
4043 then there is no PC range in the generated code that will
4044 actually be in this block, so there's no point in
4045 remembering the existence of the block. */
4046 for (tmp = PREV_INSN (insn); tmp; tmp = PREV_INSN (tmp))
4048 /* This block contains a real instruction. Note that we
4049 don't include labels; if the only thing in the block
4050 is a label, then there are still no PC values that
4051 lie within the block. */
4052 if (INSN_P (tmp))
4053 break;
4055 /* We're only interested in NOTEs. */
4056 if (GET_CODE (tmp) != NOTE)
4057 continue;
4059 if (NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BLOCK_BEG)
4061 /* We just verified that this BLOCK matches us with
4062 the block_stack check above. Never delete the
4063 BLOCK for the outermost scope of the function; we
4064 can refer to names from that scope even if the
4065 block notes are messed up. */
4066 if (! is_body_block (NOTE_BLOCK (insn))
4067 && (*debug_hooks->ignore_block) (NOTE_BLOCK (insn)))
4069 remove_insn (tmp);
4070 remove_insn (insn);
4072 break;
4074 else if (NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BLOCK_END)
4075 /* There's a nested block. We need to leave the
4076 current block in place since otherwise the debugger
4077 wouldn't be able to show symbols from our block in
4078 the nested block. */
4079 break;
4084 /* Too many begin notes. */
4085 if (block_stack || eh_stack)
4086 abort ();
4090 /* Emit insn(s) of given code and pattern
4091 at a specified place within the doubly-linked list.
4093 All of the emit_foo global entry points accept an object
4094 X which is either an insn list or a PATTERN of a single
4095 instruction.
4097 There are thus a few canonical ways to generate code and
4098 emit it at a specific place in the instruction stream. For
4099 example, consider the instruction named SPOT and the fact that
4100 we would like to emit some instructions before SPOT. We might
4101 do it like this:
4103 start_sequence ();
4104 ... emit the new instructions ...
4105 insns_head = get_insns ();
4106 end_sequence ();
4108 emit_insn_before (insns_head, SPOT);
4110 It used to be common to generate SEQUENCE rtl instead, but that
4111 is a relic of the past which no longer occurs. The reason is that
4112 SEQUENCE rtl results in badly fragmented RTL memory since the SEQUENCE
4113 generated would almost certainly die right after it was created. */
4115 /* Make X be output before the instruction BEFORE. */
4118 emit_insn_before (x, before)
4119 rtx x, before;
4121 rtx last = before;
4122 rtx insn;
4124 #ifdef ENABLE_RTL_CHECKING
4125 if (before == NULL_RTX)
4126 abort ();
4127 #endif
4129 if (x == NULL_RTX)
4130 return last;
4132 switch (GET_CODE (x))
4134 case INSN:
4135 case JUMP_INSN:
4136 case CALL_INSN:
4137 case CODE_LABEL:
4138 case BARRIER:
4139 case NOTE:
4140 insn = x;
4141 while (insn)
4143 rtx next = NEXT_INSN (insn);
4144 add_insn_before (insn, before);
4145 last = insn;
4146 insn = next;
4148 break;
4150 #ifdef ENABLE_RTL_CHECKING
4151 case SEQUENCE:
4152 abort ();
4153 break;
4154 #endif
4156 default:
4157 last = make_insn_raw (x);
4158 add_insn_before (last, before);
4159 break;
4162 return last;
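/* The canonical pattern from the comment above, spelled out as a sketch
   (DEST, SRC and SPOT stand for rtx's the caller already has):  */

#if 0	/* Example only.  */
     rtx insns_head;

     start_sequence ();
     emit_move_insn (dest, src);
     /* ... more new instructions ... */
     insns_head = get_insns ();
     end_sequence ();

     emit_insn_before (insns_head, spot);
#endif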
4165 /* Make an instruction with body X and code JUMP_INSN
4166 and output it before the instruction BEFORE. */
4169 emit_jump_insn_before (x, before)
4170 rtx x, before;
4172 rtx insn, last = NULL_RTX;
4174 #ifdef ENABLE_RTL_CHECKING
4175 if (before == NULL_RTX)
4176 abort ();
4177 #endif
4179 switch (GET_CODE (x))
4181 case INSN:
4182 case JUMP_INSN:
4183 case CALL_INSN:
4184 case CODE_LABEL:
4185 case BARRIER:
4186 case NOTE:
4187 insn = x;
4188 while (insn)
4190 rtx next = NEXT_INSN (insn);
4191 add_insn_before (insn, before);
4192 last = insn;
4193 insn = next;
4195 break;
4197 #ifdef ENABLE_RTL_CHECKING
4198 case SEQUENCE:
4199 abort ();
4200 break;
4201 #endif
4203 default:
4204 last = make_jump_insn_raw (x);
4205 add_insn_before (last, before);
4206 break;
4209 return last;
4212 /* Make an instruction with body X and code CALL_INSN
4213 and output it before the instruction BEFORE. */
4216 emit_call_insn_before (x, before)
4217 rtx x, before;
4219 rtx last = NULL_RTX, insn;
4221 #ifdef ENABLE_RTL_CHECKING
4222 if (before == NULL_RTX)
4223 abort ();
4224 #endif
4226 switch (GET_CODE (x))
4228 case INSN:
4229 case JUMP_INSN:
4230 case CALL_INSN:
4231 case CODE_LABEL:
4232 case BARRIER:
4233 case NOTE:
4234 insn = x;
4235 while (insn)
4237 rtx next = NEXT_INSN (insn);
4238 add_insn_before (insn, before);
4239 last = insn;
4240 insn = next;
4242 break;
4244 #ifdef ENABLE_RTL_CHECKING
4245 case SEQUENCE:
4246 abort ();
4247 break;
4248 #endif
4250 default:
4251 last = make_call_insn_raw (x);
4252 add_insn_before (last, before);
4253 break;
4256 return last;
4259 /* Make an insn of code BARRIER
4260 and output it before the insn BEFORE. */
4263 emit_barrier_before (before)
4264 rtx before;
4266 rtx insn = rtx_alloc (BARRIER);
4268 INSN_UID (insn) = cur_insn_uid++;
4270 add_insn_before (insn, before);
4271 return insn;
4274 /* Emit the label LABEL before the insn BEFORE. */
4277 emit_label_before (label, before)
4278 rtx label, before;
4280 /* This can be called twice for the same label as a result of the
4281 confusion that follows a syntax error! So make it harmless. */
4282 if (INSN_UID (label) == 0)
4284 INSN_UID (label) = cur_insn_uid++;
4285 add_insn_before (label, before);
4288 return label;
4291 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4294 emit_note_before (subtype, before)
4295 int subtype;
4296 rtx before;
4298 rtx note = rtx_alloc (NOTE);
4299 INSN_UID (note) = cur_insn_uid++;
4300 NOTE_SOURCE_FILE (note) = 0;
4301 NOTE_LINE_NUMBER (note) = subtype;
4302 BLOCK_FOR_INSN (note) = NULL;
4304 add_insn_before (note, before);
4305 return note;
4308 /* Helper for emit_insn_after, handles lists of instructions
4309 efficiently. */
4311 static rtx emit_insn_after_1 PARAMS ((rtx, rtx));
4313 static rtx
4314 emit_insn_after_1 (first, after)
4315 rtx first, after;
4317 rtx last;
4318 rtx after_after;
4319 basic_block bb;
4321 if (GET_CODE (after) != BARRIER
4322 && (bb = BLOCK_FOR_INSN (after)))
4324 bb->flags |= BB_DIRTY;
4325 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4326 if (GET_CODE (last) != BARRIER)
4327 set_block_for_insn (last, bb);
4328 if (GET_CODE (last) != BARRIER)
4329 set_block_for_insn (last, bb);
4330 if (bb->end == after)
4331 bb->end = last;
4333 else
4334 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4335 continue;
4337 after_after = NEXT_INSN (after);
4339 NEXT_INSN (after) = first;
4340 PREV_INSN (first) = after;
4341 NEXT_INSN (last) = after_after;
4342 if (after_after)
4343 PREV_INSN (after_after) = last;
4345 if (after == last_insn)
4346 last_insn = last;
4347 return last;
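/* A sketch of the splice just performed, assuming the existing chain
   A <-> B and a new list X <-> Y to be placed after A:

       before:   A <-> B
       after:    A <-> X <-> Y <-> B

   The four pointer assignments above are exactly NEXT_INSN (A) = X,
   PREV_INSN (X) = A, NEXT_INSN (Y) = B and PREV_INSN (B) = Y.  Note
   also that the set_block_for_insn loop stops before the final insn,
   so the trailing test handles LAST itself.  */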
4350 /* Make X be output after the insn AFTER. */
4353 emit_insn_after (x, after)
4354 rtx x, after;
4356 rtx last = after;
4358 #ifdef ENABLE_RTL_CHECKING
4359 if (after == NULL_RTX)
4360 abort ();
4361 #endif
4363 if (x == NULL_RTX)
4364 return last;
4366 switch (GET_CODE (x))
4368 case INSN:
4369 case JUMP_INSN:
4370 case CALL_INSN:
4371 case CODE_LABEL:
4372 case BARRIER:
4373 case NOTE:
4374 last = emit_insn_after_1 (x, after);
4375 break;
4377 #ifdef ENABLE_RTL_CHECKING
4378 case SEQUENCE:
4379 abort ();
4380 break;
4381 #endif
4383 default:
4384 last = make_insn_raw (x);
4385 add_insn_after (last, after);
4386 break;
4389 return last;
4392 /* Similar to emit_insn_after, except that line notes are to be inserted so
4393 as to act as if this insn were at FROM. */
4395 void
4396 emit_insn_after_with_line_notes (x, after, from)
4397 rtx x, after, from;
4399 rtx from_line = find_line_note (from);
4400 rtx after_line = find_line_note (after);
4401 rtx insn = emit_insn_after (x, after);
4403 if (from_line)
4404 emit_line_note_after (NOTE_SOURCE_FILE (from_line),
4405 NOTE_LINE_NUMBER (from_line),
4406 after);
4408 if (after_line)
4409 emit_line_note_after (NOTE_SOURCE_FILE (after_line),
4410 NOTE_LINE_NUMBER (after_line),
4411 insn);
4414 /* Make an insn of code JUMP_INSN with body X
4415 and output it after the insn AFTER. */
4418 emit_jump_insn_after (x, after)
4419 rtx x, after;
4421 rtx last;
4423 #ifdef ENABLE_RTL_CHECKING
4424 if (after == NULL_RTX)
4425 abort ();
4426 #endif
4428 switch (GET_CODE (x))
4430 case INSN:
4431 case JUMP_INSN:
4432 case CALL_INSN:
4433 case CODE_LABEL:
4434 case BARRIER:
4435 case NOTE:
4436 last = emit_insn_after_1 (x, after);
4437 break;
4439 #ifdef ENABLE_RTL_CHECKING
4440 case SEQUENCE:
4441 abort ();
4442 break;
4443 #endif
4445 default:
4446 last = make_jump_insn_raw (x);
4447 add_insn_after (last, after);
4448 break;
4451 return last;
4454 /* Make an instruction with body X and code CALL_INSN
4455 and output it after the instruction AFTER. */
4458 emit_call_insn_after (x, after)
4459 rtx x, after;
4461 rtx last;
4463 #ifdef ENABLE_RTL_CHECKING
4464 if (after == NULL_RTX)
4465 abort ();
4466 #endif
4468 switch (GET_CODE (x))
4470 case INSN:
4471 case JUMP_INSN:
4472 case CALL_INSN:
4473 case CODE_LABEL:
4474 case BARRIER:
4475 case NOTE:
4476 last = emit_insn_after_1 (x, after);
4477 break;
4479 #ifdef ENABLE_RTL_CHECKING
4480 case SEQUENCE:
4481 abort ();
4482 break;
4483 #endif
4485 default:
4486 last = make_call_insn_raw (x);
4487 add_insn_after (last, after);
4488 break;
4491 return last;
4494 /* Make an insn of code BARRIER
4495 and output it after the insn AFTER. */
4498 emit_barrier_after (after)
4499 rtx after;
4501 rtx insn = rtx_alloc (BARRIER);
4503 INSN_UID (insn) = cur_insn_uid++;
4505 add_insn_after (insn, after);
4506 return insn;
4509 /* Emit the label LABEL after the insn AFTER. */
4512 emit_label_after (label, after)
4513 rtx label, after;
4515 /* This can be called twice for the same label
4516 as a result of the confusion that follows a syntax error!
4517 So make it harmless. */
4518 if (INSN_UID (label) == 0)
4520 INSN_UID (label) = cur_insn_uid++;
4521 add_insn_after (label, after);
4524 return label;
4527 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
4530 emit_note_after (subtype, after)
4531 int subtype;
4532 rtx after;
4534 rtx note = rtx_alloc (NOTE);
4535 INSN_UID (note) = cur_insn_uid++;
4536 NOTE_SOURCE_FILE (note) = 0;
4537 NOTE_LINE_NUMBER (note) = subtype;
4538 BLOCK_FOR_INSN (note) = NULL;
4539 add_insn_after (note, after);
4540 return note;
4543 /* Emit a line note for FILE and LINE after the insn AFTER. */
4546 emit_line_note_after (file, line, after)
4547 const char *file;
4548 int line;
4549 rtx after;
4551 rtx note;
4553 if (no_line_numbers && line > 0)
4555 cur_insn_uid++;
4556 return 0;
4559 note = rtx_alloc (NOTE);
4560 INSN_UID (note) = cur_insn_uid++;
4561 NOTE_SOURCE_FILE (note) = file;
4562 NOTE_LINE_NUMBER (note) = line;
4563 BLOCK_FOR_INSN (note) = NULL;
4564 add_insn_after (note, after);
4565 return note;
4568 /* Like emit_insn_after, but set INSN_SCOPE according to SCOPE. */
4570 emit_insn_after_scope (pattern, after, scope)
4571 rtx pattern, after;
4572 tree scope;
4574 rtx last = emit_insn_after (pattern, after);
4576 after = NEXT_INSN (after);
4577 while (1)
4579 if (active_insn_p (after))
4580 INSN_SCOPE (after) = scope;
4581 if (after == last)
4582 break;
4583 after = NEXT_INSN (after);
4585 return last;
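/* A hedged usage sketch: stamping a scope onto freshly emitted insns.
   PATTERN, SPOT and BLOCK are illustrative placeholders, not names
   from this file:

       rtx last = emit_insn_after_scope (pattern, spot, block);

   Every insn between NEXT_INSN (spot) and LAST, inclusive, that
   satisfies active_insn_p has its INSN_SCOPE set to BLOCK.  */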
4588 /* Like emit_jump_insn_after, but set INSN_SCOPE according to SCOPE. */
4590 emit_jump_insn_after_scope (pattern, after, scope)
4591 rtx pattern, after;
4592 tree scope;
4594 rtx last = emit_jump_insn_after (pattern, after);
4596 after = NEXT_INSN (after);
4597 while (1)
4599 if (active_insn_p (after))
4600 INSN_SCOPE (after) = scope;
4601 if (after == last)
4602 break;
4603 after = NEXT_INSN (after);
4605 return last;
4608 /* Like emit_call_insn_after, but set INSN_SCOPE according to SCOPE. */
4610 emit_call_insn_after_scope (pattern, after, scope)
4611 rtx pattern, after;
4612 tree scope;
4614 rtx last = emit_call_insn_after (pattern, after);
4616 after = NEXT_INSN (after);
4617 while (1)
4619 if (active_insn_p (after))
4620 INSN_SCOPE (after) = scope;
4621 if (after == last)
4622 break;
4623 after = NEXT_INSN (after);
4625 return last;
4628 /* Like emit_insn_before, but set INSN_SCOPE according to SCOPE. */
4630 emit_insn_before_scope (pattern, before, scope)
4631 rtx pattern, before;
4632 tree scope;
4634 rtx first = PREV_INSN (before);
4635 rtx last = emit_insn_before (pattern, before);
4637 first = NEXT_INSN (first);
4638 while (1)
4640 if (active_insn_p (first))
4641 INSN_SCOPE (first) = scope;
4642 if (first == last)
4643 break;
4644 first = NEXT_INSN (first);
4646 return last;
4649 /* Take X and emit it at the end of the doubly-linked
4650 INSN list.
4652 Returns the last insn emitted. */
4655 emit_insn (x)
4656 rtx x;
4658 rtx last = last_insn;
4659 rtx insn;
4661 if (x == NULL_RTX)
4662 return last;
4664 switch (GET_CODE (x))
4666 case INSN:
4667 case JUMP_INSN:
4668 case CALL_INSN:
4669 case CODE_LABEL:
4670 case BARRIER:
4671 case NOTE:
4672 insn = x;
4673 while (insn)
4675 rtx next = NEXT_INSN (insn);
4676 add_insn (insn);
4677 last = insn;
4678 insn = next;
4680 break;
4682 #ifdef ENABLE_RTL_CHECKING
4683 case SEQUENCE:
4684 abort ();
4685 break;
4686 #endif
4688 default:
4689 last = make_insn_raw (x);
4690 add_insn (last);
4691 break;
4694 return last;
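/* A hedged example: emitting a simple register copy at the end of the
   current chain (DEST_REG and SRC_REG are illustrative REG rtxes):

       emit_insn (gen_rtx_SET (VOIDmode, dest_reg, src_reg));

   Since a SET is not itself an insn, the default case above wraps it
   with make_insn_raw before linking it in with add_insn.  */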
4697 /* Make an insn of code JUMP_INSN with pattern X
4698 and add it to the end of the doubly-linked list. */
4701 emit_jump_insn (x)
4702 rtx x;
4704 rtx last = NULL_RTX, insn;
4706 switch (GET_CODE (x))
4708 case INSN:
4709 case JUMP_INSN:
4710 case CALL_INSN:
4711 case CODE_LABEL:
4712 case BARRIER:
4713 case NOTE:
4714 insn = x;
4715 while (insn)
4717 rtx next = NEXT_INSN (insn);
4718 add_insn (insn);
4719 last = insn;
4720 insn = next;
4722 break;
4724 #ifdef ENABLE_RTL_CHECKING
4725 case SEQUENCE:
4726 abort ();
4727 break;
4728 #endif
4730 default:
4731 last = make_jump_insn_raw (x);
4732 add_insn (last);
4733 break;
4736 return last;
4739 /* Make an insn of code CALL_INSN with pattern X
4740 and add it to the end of the doubly-linked list. */
4743 emit_call_insn (x)
4744 rtx x;
4746 rtx insn;
4748 switch (GET_CODE (x))
4750 case INSN:
4751 case JUMP_INSN:
4752 case CALL_INSN:
4753 case CODE_LABEL:
4754 case BARRIER:
4755 case NOTE:
4756 insn = emit_insn (x);
4757 break;
4759 #ifdef ENABLE_RTL_CHECKING
4760 case SEQUENCE:
4761 abort ();
4762 break;
4763 #endif
4765 default:
4766 insn = make_call_insn_raw (x);
4767 add_insn (insn);
4768 break;
4771 return insn;
4774 /* Add the label LABEL to the end of the doubly-linked list. */
4777 emit_label (label)
4778 rtx label;
4780 /* This can be called twice for the same label
4781 as a result of the confusion that follows a syntax error!
4782 So make it harmless. */
4783 if (INSN_UID (label) == 0)
4785 INSN_UID (label) = cur_insn_uid++;
4786 add_insn (label);
4788 return label;
4791 /* Make an insn of code BARRIER
4792 and add it to the end of the doubly-linked list. */
4795 emit_barrier ()
4797 rtx barrier = rtx_alloc (BARRIER);
4798 INSN_UID (barrier) = cur_insn_uid++;
4799 add_insn (barrier);
4800 return barrier;
4803 /* Make an insn of code NOTE
4804 with data-fields specified by FILE and LINE
4805 and add it to the end of the doubly-linked list,
4806 but only if line-numbers are desired for debugging info. */
4809 emit_line_note (file, line)
4810 const char *file;
4811 int line;
4813 set_file_and_line_for_stmt (file, line);
4815 #if 0
4816 if (no_line_numbers)
4817 return 0;
4818 #endif
4820 return emit_note (file, line);
4823 /* Make an insn of code NOTE
4824 with data-fields specified by FILE and LINE
4825 and add it to the end of the doubly-linked list.
4826 If it is a line-number NOTE, omit it if it matches the previous one. */
4829 emit_note (file, line)
4830 const char *file;
4831 int line;
4833 rtx note;
4835 if (line > 0)
4837 if (file && last_filename && !strcmp (file, last_filename)
4838 && line == last_linenum)
4839 return 0;
4840 last_filename = file;
4841 last_linenum = line;
4844 if (no_line_numbers && line > 0)
4846 cur_insn_uid++;
4847 return 0;
4850 note = rtx_alloc (NOTE);
4851 INSN_UID (note) = cur_insn_uid++;
4852 NOTE_SOURCE_FILE (note) = file;
4853 NOTE_LINE_NUMBER (note) = line;
4854 BLOCK_FOR_INSN (note) = NULL;
4855 add_insn (note);
4856 return note;
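/* Illustrative sketch of the duplicate suppression above:

       emit_note ("foo.c", 10);   -- allocates and adds a NOTE
       emit_note ("foo.c", 10);   -- returns 0; file and line repeat

   Notes with LINE <= 0 (the NOTE_INSN_* subtypes) bypass both the
   duplicate check and the no_line_numbers check.  */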
4859 /* Emit a NOTE, and don't omit it even if LINE is the previous note. */
4862 emit_line_note_force (file, line)
4863 const char *file;
4864 int line;
4866 last_linenum = -1;
4867 return emit_line_note (file, line);
4870 /* Cause next statement to emit a line note even if the line number
4871 has not changed. This is used at the beginning of a function. */
4873 void
4874 force_next_line_note ()
4876 last_linenum = -1;
4879 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
4880 note of this type already exists, remove it first. */
4883 set_unique_reg_note (insn, kind, datum)
4884 rtx insn;
4885 enum reg_note kind;
4886 rtx datum;
4888 rtx note = find_reg_note (insn, kind, NULL_RTX);
4890 switch (kind)
4892 case REG_EQUAL:
4893 case REG_EQUIV:
4894 /* Don't add REG_EQUAL/REG_EQUIV notes if the insn
4895 has multiple sets (some callers assume single_set
4896 means the insn only has one set, when in fact it
4897 means the insn only has one *useful* set). */
4898 if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
4900 if (note)
4901 abort ();
4902 return NULL_RTX;
4905 /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
4906 It serves no useful purpose and breaks eliminate_regs. */
4907 if (GET_CODE (datum) == ASM_OPERANDS)
4908 return NULL_RTX;
4909 break;
4911 default:
4912 break;
4915 if (note)
4917 XEXP (note, 0) = datum;
4918 return note;
4921 REG_NOTES (insn) = gen_rtx_EXPR_LIST (kind, datum, REG_NOTES (insn));
4922 return REG_NOTES (insn);
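/* A hedged usage sketch; INSN stands for a single-set insn whose
   result is known to equal the constant 42:

       set_unique_reg_note (insn, REG_EQUAL, GEN_INT (42));

   An existing REG_EQUAL note has its datum overwritten in place;
   otherwise a fresh EXPR_LIST is prepended to REG_NOTES (insn).  */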
4925 /* Return an indication of which type of insn should have X as a body.
4926 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
4928 enum rtx_code
4929 classify_insn (x)
4930 rtx x;
4932 if (GET_CODE (x) == CODE_LABEL)
4933 return CODE_LABEL;
4934 if (GET_CODE (x) == CALL)
4935 return CALL_INSN;
4936 if (GET_CODE (x) == RETURN)
4937 return JUMP_INSN;
4938 if (GET_CODE (x) == SET)
4940 if (SET_DEST (x) == pc_rtx)
4941 return JUMP_INSN;
4942 else if (GET_CODE (SET_SRC (x)) == CALL)
4943 return CALL_INSN;
4944 else
4945 return INSN;
4947 if (GET_CODE (x) == PARALLEL)
4949 int j;
4950 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
4951 if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
4952 return CALL_INSN;
4953 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
4954 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
4955 return JUMP_INSN;
4956 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
4957 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
4958 return CALL_INSN;
4960 return INSN;
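/* For instance (patterns purely illustrative):

       (set (reg:SI 100) (const_int 1))            => INSN
       (set (pc) (label_ref 23))                   => JUMP_INSN
       (set (reg:SI 0) (call (mem:SI ...) ...))    => CALL_INSN  */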
4963 /* Emit the rtl pattern X as an appropriate kind of insn.
4964 If X is a label, it is simply added into the insn chain. */
4967 emit (x)
4968 rtx x;
4970 enum rtx_code code = classify_insn (x);
4972 if (code == CODE_LABEL)
4973 return emit_label (x);
4974 else if (code == INSN)
4975 return emit_insn (x);
4976 else if (code == JUMP_INSN)
4978 rtx insn = emit_jump_insn (x);
4979 if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
4980 return emit_barrier ();
4981 return insn;
4983 else if (code == CALL_INSN)
4984 return emit_call_insn (x);
4985 else
4986 abort ();
4989 /* Space for free sequence stack entries. */
4990 static GTY ((deletable (""))) struct sequence_stack *free_sequence_stack;
4992 /* Begin emitting insns to a sequence which can be packaged in an
4993 RTL_EXPR. If this sequence will contain something that might cause
4994 the compiler to pop arguments to function calls (because those
4995 pops have previously been deferred; see INHIBIT_DEFER_POP for more
4996 details), use do_pending_stack_adjust before calling this function.
4997 That will ensure that the deferred pops are not accidentally
4998 emitted in the middle of this sequence. */
5000 void
5001 start_sequence ()
5003 struct sequence_stack *tem;
5005 if (free_sequence_stack != NULL)
5007 tem = free_sequence_stack;
5008 free_sequence_stack = tem->next;
5010 else
5011 tem = (struct sequence_stack *) ggc_alloc (sizeof (struct sequence_stack));
5013 tem->next = seq_stack;
5014 tem->first = first_insn;
5015 tem->last = last_insn;
5016 tem->sequence_rtl_expr = seq_rtl_expr;
5018 seq_stack = tem;
5020 first_insn = 0;
5021 last_insn = 0;
5024 /* Similarly, but indicate that this sequence will be placed in T, an
5025 RTL_EXPR. See the documentation for start_sequence for more
5026 information about how to use this function. */
5028 void
5029 start_sequence_for_rtl_expr (t)
5030 tree t;
5032 start_sequence ();
5034 seq_rtl_expr = t;
5037 /* Set up the insn chain starting with FIRST as the current sequence,
5038 saving the previously current one. See the documentation for
5039 start_sequence for more information about how to use this function. */
5041 void
5042 push_to_sequence (first)
5043 rtx first;
5045 rtx last;
5047 start_sequence ();
5049 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last));
5051 first_insn = first;
5052 last_insn = last;
5055 /* Set up the insn chain starting at FIRST and ending at LAST as the current sequence. */
5057 void
5058 push_to_full_sequence (first, last)
5059 rtx first, last;
5061 start_sequence ();
5062 first_insn = first;
5063 last_insn = last;
5064 /* We really should have the end of the insn chain here. */
5065 if (last && NEXT_INSN (last))
5066 abort ();
5069 /* Set up the outer-level insn chain
5070 as the current sequence, saving the previously current one. */
5072 void
5073 push_topmost_sequence ()
5075 struct sequence_stack *stack, *top = NULL;
5077 start_sequence ();
5079 for (stack = seq_stack; stack; stack = stack->next)
5080 top = stack;
5082 first_insn = top->first;
5083 last_insn = top->last;
5084 seq_rtl_expr = top->sequence_rtl_expr;
5087 /* After emitting to the outer-level insn chain, update that chain
5088 and restore the previously saved state. */
5090 void
5091 pop_topmost_sequence ()
5093 struct sequence_stack *stack, *top = NULL;
5095 for (stack = seq_stack; stack; stack = stack->next)
5096 top = stack;
5098 top->first = first_insn;
5099 top->last = last_insn;
5100 /* ??? Why don't we save seq_rtl_expr here? */
5102 end_sequence ();
5105 /* After emitting to a sequence, restore the previously saved state.
5107 To get the contents of the sequence just made, you must call
5108 `get_insns' *before* calling here.
5110 If the compiler might have deferred popping arguments while
5111 generating this sequence, and this sequence will not be immediately
5112 inserted into the instruction stream, use do_pending_stack_adjust
5113 before calling get_insns. That will ensure that the deferred
5114 pops are inserted into this sequence, and not into some random
5115 location in the instruction stream. See INHIBIT_DEFER_POP for more
5116 information about deferred popping of arguments. */
5118 void
5119 end_sequence ()
5121 struct sequence_stack *tem = seq_stack;
5123 first_insn = tem->first;
5124 last_insn = tem->last;
5125 seq_rtl_expr = tem->sequence_rtl_expr;
5126 seq_stack = tem->next;
5128 memset (tem, 0, sizeof (*tem));
5129 tem->next = free_sequence_stack;
5130 free_sequence_stack = tem;
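/* A minimal round trip (SPOT is an illustrative existing insn):

       rtx seq;
       start_sequence ();
       ... emit the new insns ...
       seq = get_insns ();
       end_sequence ();
       emit_insn_before (seq, spot);

   If the sequence is to be held aside rather than inserted at once,
   call do_pending_stack_adjust before get_insns, as described above.  */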
5133 /* This works like end_sequence, but records the old sequence in FIRST
5134 and LAST. */
5136 void
5137 end_full_sequence (first, last)
5138 rtx *first, *last;
5140 *first = first_insn;
5141 *last = last_insn;
5142 end_sequence ();
5145 /* Return 1 if currently emitting into a sequence. */
5148 in_sequence_p ()
5150 return seq_stack != 0;
5153 /* Put the various virtual registers into REGNO_REG_RTX. */
5155 void
5156 init_virtual_regs (es)
5157 struct emit_status *es;
5159 rtx *ptr = es->x_regno_reg_rtx;
5160 ptr[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
5161 ptr[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
5162 ptr[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
5163 ptr[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
5164 ptr[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
5168 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
5169 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
5170 static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
5171 static int copy_insn_n_scratches;
5173 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5174 copied an ASM_OPERANDS.
5175 In that case, it is the original input-operand vector. */
5176 static rtvec orig_asm_operands_vector;
5178 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5179 copied an ASM_OPERANDS.
5180 In that case, it is the copied input-operand vector. */
5181 static rtvec copy_asm_operands_vector;
5183 /* Likewise for the constraints vector. */
5184 static rtvec orig_asm_constraints_vector;
5185 static rtvec copy_asm_constraints_vector;
5187 /* Recursively create a new copy of an rtx for copy_insn.
5188 This function differs from copy_rtx in that it handles SCRATCHes and
5189 ASM_OPERANDs properly.
5190 Normally, this function is not used directly; use copy_insn as front end.
5191 However, you could first copy an insn pattern with copy_insn and then use
5192 this function afterwards to properly copy any REG_NOTEs containing
5193 SCRATCHes. */
5196 copy_insn_1 (orig)
5197 rtx orig;
5199 rtx copy;
5200 int i, j;
5201 RTX_CODE code;
5202 const char *format_ptr;
5204 code = GET_CODE (orig);
5206 switch (code)
5208 case REG:
5209 case QUEUED:
5210 case CONST_INT:
5211 case CONST_DOUBLE:
5212 case CONST_VECTOR:
5213 case SYMBOL_REF:
5214 case CODE_LABEL:
5215 case PC:
5216 case CC0:
5217 case ADDRESSOF:
5218 return orig;
5220 case SCRATCH:
5221 for (i = 0; i < copy_insn_n_scratches; i++)
5222 if (copy_insn_scratch_in[i] == orig)
5223 return copy_insn_scratch_out[i];
5224 break;
5226 case CONST:
5227 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
5228 a LABEL_REF, it isn't sharable. */
5229 if (GET_CODE (XEXP (orig, 0)) == PLUS
5230 && GET_CODE (XEXP (XEXP (orig, 0), 0)) == SYMBOL_REF
5231 && GET_CODE (XEXP (XEXP (orig, 0), 1)) == CONST_INT)
5232 return orig;
5233 break;
5235 /* A MEM with a constant address is not sharable. The problem is that
5236 the constant address may need to be reloaded. If the mem is shared,
5237 then reloading one copy of this mem will cause all copies to appear
5238 to have been reloaded. */
5240 default:
5241 break;
5244 copy = rtx_alloc (code);
5246 /* Copy the various flags, and other information. We assume that
5247 all fields need copying, and then clear the fields that should
5248 not be copied. That is the sensible default behavior, and forces
5249 us to explicitly document why we are *not* copying a flag. */
5250 memcpy (copy, orig, sizeof (struct rtx_def) - sizeof (rtunion));
5252 /* We do not copy the USED flag, which is used as a mark bit during
5253 walks over the RTL. */
5254 RTX_FLAG (copy, used) = 0;
5256 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
5257 if (GET_RTX_CLASS (code) == 'i')
5259 RTX_FLAG (copy, jump) = 0;
5260 RTX_FLAG (copy, call) = 0;
5261 RTX_FLAG (copy, frame_related) = 0;
5264 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
5266 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
5268 copy->fld[i] = orig->fld[i];
5269 switch (*format_ptr++)
5271 case 'e':
5272 if (XEXP (orig, i) != NULL)
5273 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
5274 break;
5276 case 'E':
5277 case 'V':
5278 if (XVEC (orig, i) == orig_asm_constraints_vector)
5279 XVEC (copy, i) = copy_asm_constraints_vector;
5280 else if (XVEC (orig, i) == orig_asm_operands_vector)
5281 XVEC (copy, i) = copy_asm_operands_vector;
5282 else if (XVEC (orig, i) != NULL)
5284 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
5285 for (j = 0; j < XVECLEN (copy, i); j++)
5286 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
5288 break;
5290 case 't':
5291 case 'w':
5292 case 'i':
5293 case 's':
5294 case 'S':
5295 case 'u':
5296 case '0':
5297 /* These are left unchanged. */
5298 break;
5300 default:
5301 abort ();
5305 if (code == SCRATCH)
5307 i = copy_insn_n_scratches++;
5308 if (i >= MAX_RECOG_OPERANDS)
5309 abort ();
5310 copy_insn_scratch_in[i] = orig;
5311 copy_insn_scratch_out[i] = copy;
5313 else if (code == ASM_OPERANDS)
5315 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
5316 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
5317 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
5318 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
5321 return copy;
5324 /* Create a new copy of an rtx.
5325 This function differs from copy_rtx in that it handles SCRATCHes and
5326 ASM_OPERANDs properly.
5327 INSN doesn't really have to be a full INSN; it could be just the
5328 pattern. */
5330 copy_insn (insn)
5331 rtx insn;
5333 copy_insn_n_scratches = 0;
5334 orig_asm_operands_vector = 0;
5335 orig_asm_constraints_vector = 0;
5336 copy_asm_operands_vector = 0;
5337 copy_asm_constraints_vector = 0;
5338 return copy_insn_1 (insn);
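/* Typical use (sketch): duplicating an existing insn's pattern for
   re-emission elsewhere, which is exactly the idiom
   emit_copy_of_insn_after below relies on:

       rtx new = emit_insn_after (copy_insn (PATTERN (insn)), after);  */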
5341 /* Initialize data structures and variables in this file
5342 before generating rtl for each function. */
5344 void
5345 init_emit ()
5347 struct function *f = cfun;
5349 f->emit = (struct emit_status *) ggc_alloc (sizeof (struct emit_status));
5350 first_insn = NULL;
5351 last_insn = NULL;
5352 seq_rtl_expr = NULL;
5353 cur_insn_uid = 1;
5354 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
5355 last_linenum = 0;
5356 last_filename = 0;
5357 first_label_num = label_num;
5358 last_label_num = 0;
5359 seq_stack = NULL;
5361 /* Init the tables that describe all the pseudo regs. */
5363 f->emit->regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
5365 f->emit->regno_pointer_align
5366 = (unsigned char *) ggc_alloc_cleared (f->emit->regno_pointer_align_length
5367 * sizeof (unsigned char));
5369 regno_reg_rtx
5370 = (rtx *) ggc_alloc (f->emit->regno_pointer_align_length * sizeof (rtx));
5372 /* Put copies of all the hard registers into regno_reg_rtx. */
5373 memcpy (regno_reg_rtx,
5374 static_regno_reg_rtx,
5375 FIRST_PSEUDO_REGISTER * sizeof (rtx));
5377 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
5378 init_virtual_regs (f->emit);
5380 /* Indicate that the virtual registers and stack locations are
5381 all pointers. */
5382 REG_POINTER (stack_pointer_rtx) = 1;
5383 REG_POINTER (frame_pointer_rtx) = 1;
5384 REG_POINTER (hard_frame_pointer_rtx) = 1;
5385 REG_POINTER (arg_pointer_rtx) = 1;
5387 REG_POINTER (virtual_incoming_args_rtx) = 1;
5388 REG_POINTER (virtual_stack_vars_rtx) = 1;
5389 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5390 REG_POINTER (virtual_outgoing_args_rtx) = 1;
5391 REG_POINTER (virtual_cfa_rtx) = 1;
5393 #ifdef STACK_BOUNDARY
5394 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5395 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5396 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5397 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5399 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5400 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5401 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5402 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5403 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
5404 #endif
5406 #ifdef INIT_EXPANDERS
5407 INIT_EXPANDERS;
5408 #endif
5411 /* Generate the constant vector of all zeros for vector mode MODE. */
5413 static rtx
5414 gen_const_vector_0 (mode)
5415 enum machine_mode mode;
5417 rtx tem;
5418 rtvec v;
5419 int units, i;
5420 enum machine_mode inner;
5422 units = GET_MODE_NUNITS (mode);
5423 inner = GET_MODE_INNER (mode);
5425 v = rtvec_alloc (units);
5427 /* We must set CONST0_RTX for the inner mode before calling this function. */
5428 if (!CONST0_RTX (inner))
5429 abort ();
5431 for (i = 0; i < units; ++i)
5432 RTVEC_ELT (v, i) = CONST0_RTX (inner);
5434 tem = gen_rtx_raw_CONST_VECTOR (mode, v);
5435 return tem;
5438 /* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the zero vector when
5439 all elements are zero. */
5441 gen_rtx_CONST_VECTOR (mode, v)
5442 enum machine_mode mode;
5443 rtvec v;
5445 rtx inner_zero = CONST0_RTX (GET_MODE_INNER (mode));
5446 int i;
5448 for (i = GET_MODE_NUNITS (mode) - 1; i >= 0; i--)
5449 if (RTVEC_ELT (v, i) != inner_zero)
5450 return gen_rtx_raw_CONST_VECTOR (mode, v);
5451 return CONST0_RTX (mode);
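/* So an all-zero V4SI vector comes back as the shared
   CONST0_RTX (V4SImode), while any nonzero element forces a fresh
   CONST_VECTOR (hedged sketch; the mode choice is illustrative):

       rtvec v = rtvec_alloc (4);
       -- fill RTVEC_ELT (v, 0) .. RTVEC_ELT (v, 3) --
       rtx x = gen_rtx_CONST_VECTOR (V4SImode, v);  */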
5454 /* Create some permanent unique rtl objects shared between all functions.
5455 LINE_NUMBERS is nonzero if line numbers are to be generated. */
5457 void
5458 init_emit_once (line_numbers)
5459 int line_numbers;
5461 int i;
5462 enum machine_mode mode;
5463 enum machine_mode double_mode;
5465 /* Initialize the CONST_INT, CONST_DOUBLE, and memory attribute hash
5466 tables. */
5467 const_int_htab = htab_create_ggc (37, const_int_htab_hash,
5468 const_int_htab_eq, NULL);
5470 const_double_htab = htab_create_ggc (37, const_double_htab_hash,
5471 const_double_htab_eq, NULL);
5473 mem_attrs_htab = htab_create_ggc (37, mem_attrs_htab_hash,
5474 mem_attrs_htab_eq, NULL);
5475 reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash,
5476 reg_attrs_htab_eq, NULL);
5478 no_line_numbers = ! line_numbers;
5480 /* Compute the word and byte modes. */
5482 byte_mode = VOIDmode;
5483 word_mode = VOIDmode;
5484 double_mode = VOIDmode;
5486 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
5487 mode = GET_MODE_WIDER_MODE (mode))
5489 if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
5490 && byte_mode == VOIDmode)
5491 byte_mode = mode;
5493 if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
5494 && word_mode == VOIDmode)
5495 word_mode = mode;
5498 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
5499 mode = GET_MODE_WIDER_MODE (mode))
5501 if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
5502 && double_mode == VOIDmode)
5503 double_mode = mode;
5506 ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
5508 /* Assign register numbers to the globally defined register rtx.
5509 This must be done at runtime because the register number field
5510 is in a union and some compilers can't initialize unions. */
5512 pc_rtx = gen_rtx (PC, VOIDmode);
5513 cc0_rtx = gen_rtx (CC0, VOIDmode);
5514 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
5515 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
5516 if (hard_frame_pointer_rtx == 0)
5517 hard_frame_pointer_rtx = gen_raw_REG (Pmode,
5518 HARD_FRAME_POINTER_REGNUM);
5519 if (arg_pointer_rtx == 0)
5520 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
5521 virtual_incoming_args_rtx =
5522 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
5523 virtual_stack_vars_rtx =
5524 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
5525 virtual_stack_dynamic_rtx =
5526 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
5527 virtual_outgoing_args_rtx =
5528 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
5529 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
5531 /* Initialize RTL for commonly used hard registers. These are
5532 copied into regno_reg_rtx as we begin to compile each function. */
5533 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5534 static_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
5536 #ifdef INIT_EXPANDERS
5537 /* This is to initialize {init|mark|free}_machine_status before the first
5538 call to push_function_context_to. This is needed by the Chill front
5539 end which calls push_function_context_to before the first call to
5540 init_function_start. */
5541 INIT_EXPANDERS;
5542 #endif
5544 /* Create the unique rtx's for certain rtx codes and operand values. */
5546 /* Don't use gen_rtx here since gen_rtx in this case
5547 tries to use these variables. */
5548 for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
5549 const_int_rtx[i + MAX_SAVED_CONST_INT] =
5550 gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);
5552 if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
5553 && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
5554 const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
5555 else
5556 const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
5558 REAL_VALUE_FROM_INT (dconst0, 0, 0, double_mode);
5559 REAL_VALUE_FROM_INT (dconst1, 1, 0, double_mode);
5560 REAL_VALUE_FROM_INT (dconst2, 2, 0, double_mode);
5561 REAL_VALUE_FROM_INT (dconstm1, -1, -1, double_mode);
5562 REAL_VALUE_FROM_INT (dconstm2, -2, -1, double_mode);
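/* Halve 1.0 exactly by decrementing its binary exponent; this avoids
   needing a REAL_VALUE division here.  */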
5564 dconsthalf = dconst1;
5565 dconsthalf.exp--;
5567 for (i = 0; i <= 2; i++)
5569 REAL_VALUE_TYPE *r =
5570 (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
5572 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
5573 mode = GET_MODE_WIDER_MODE (mode))
5574 const_tiny_rtx[i][(int) mode] =
5575 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5577 const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
5579 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
5580 mode = GET_MODE_WIDER_MODE (mode))
5581 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5583 for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT);
5584 mode != VOIDmode;
5585 mode = GET_MODE_WIDER_MODE (mode))
5586 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5589 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
5590 mode != VOIDmode;
5591 mode = GET_MODE_WIDER_MODE (mode))
5592 const_tiny_rtx[0][(int) mode] = gen_const_vector_0 (mode);
5594 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
5595 mode != VOIDmode;
5596 mode = GET_MODE_WIDER_MODE (mode))
5597 const_tiny_rtx[0][(int) mode] = gen_const_vector_0 (mode);
5599 for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
5600 if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
5601 const_tiny_rtx[0][i] = const0_rtx;
5603 const_tiny_rtx[0][(int) BImode] = const0_rtx;
5604 if (STORE_FLAG_VALUE == 1)
5605 const_tiny_rtx[1][(int) BImode] = const1_rtx;
5607 #ifdef RETURN_ADDRESS_POINTER_REGNUM
5608 return_address_pointer_rtx
5609 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
5610 #endif
5612 #ifdef STRUCT_VALUE
5613 struct_value_rtx = STRUCT_VALUE;
5614 #else
5615 struct_value_rtx = gen_rtx_REG (Pmode, STRUCT_VALUE_REGNUM);
5616 #endif
5618 #ifdef STRUCT_VALUE_INCOMING
5619 struct_value_incoming_rtx = STRUCT_VALUE_INCOMING;
5620 #else
5621 #ifdef STRUCT_VALUE_INCOMING_REGNUM
5622 struct_value_incoming_rtx
5623 = gen_rtx_REG (Pmode, STRUCT_VALUE_INCOMING_REGNUM);
5624 #else
5625 struct_value_incoming_rtx = struct_value_rtx;
5626 #endif
5627 #endif
5629 #ifdef STATIC_CHAIN_REGNUM
5630 static_chain_rtx = gen_rtx_REG (Pmode, STATIC_CHAIN_REGNUM);
5632 #ifdef STATIC_CHAIN_INCOMING_REGNUM
5633 if (STATIC_CHAIN_INCOMING_REGNUM != STATIC_CHAIN_REGNUM)
5634 static_chain_incoming_rtx
5635 = gen_rtx_REG (Pmode, STATIC_CHAIN_INCOMING_REGNUM);
5636 else
5637 #endif
5638 static_chain_incoming_rtx = static_chain_rtx;
5639 #endif
5641 #ifdef STATIC_CHAIN
5642 static_chain_rtx = STATIC_CHAIN;
5644 #ifdef STATIC_CHAIN_INCOMING
5645 static_chain_incoming_rtx = STATIC_CHAIN_INCOMING;
5646 #else
5647 static_chain_incoming_rtx = static_chain_rtx;
5648 #endif
5649 #endif
5651 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
5652 pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
5655 /* Query and clear, or later restore, no_line_numbers. This is used by the
5656 switch / case handling in stmt.c to give proper line numbers in
5657 warnings about unreachable code. */
5660 force_line_numbers ()
5662 int old = no_line_numbers;
5664 no_line_numbers = 0;
5665 if (old)
5666 force_next_line_note ();
5667 return old;
5670 void
5671 restore_line_number_status (old_value)
5672 int old_value;
5674 no_line_numbers = old_value;
5677 /* Produce an exact duplicate of insn INSN after AFTER,
5678 taking care to update any libcall regions if present. */
5681 emit_copy_of_insn_after (insn, after)
5682 rtx insn, after;
5684 rtx new;
5685 rtx note1, note2, link;
5687 switch (GET_CODE (insn))
5689 case INSN:
5690 new = emit_insn_after (copy_insn (PATTERN (insn)), after);
5691 break;
5693 case JUMP_INSN:
5694 new = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
5695 break;
5697 case CALL_INSN:
5698 new = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
5699 if (CALL_INSN_FUNCTION_USAGE (insn))
5700 CALL_INSN_FUNCTION_USAGE (new)
5701 = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
5702 SIBLING_CALL_P (new) = SIBLING_CALL_P (insn);
5703 CONST_OR_PURE_CALL_P (new) = CONST_OR_PURE_CALL_P (insn);
5704 break;
5706 default:
5707 abort ();
5710 /* Update LABEL_NUSES. */
5711 mark_jump_label (PATTERN (new), new, 0);
5713 INSN_SCOPE (new) = INSN_SCOPE (insn);
5715 /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
5716 make them. */
5717 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
5718 if (REG_NOTE_KIND (link) != REG_LABEL)
5720 if (GET_CODE (link) == EXPR_LIST)
5721 REG_NOTES (new)
5722 = copy_insn_1 (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link),
5723 XEXP (link, 0),
5724 REG_NOTES (new)));
5725 else
5726 REG_NOTES (new)
5727 = copy_insn_1 (gen_rtx_INSN_LIST (REG_NOTE_KIND (link),
5728 XEXP (link, 0),
5729 REG_NOTES (new)));
5732 /* Fix the libcall sequences. */
5733 if ((note1 = find_reg_note (new, REG_RETVAL, NULL_RTX)) != NULL)
5735 rtx p = new;
5736 while ((note2 = find_reg_note (p, REG_LIBCALL, NULL_RTX)) == NULL)
5737 p = PREV_INSN (p);
5738 XEXP (note1, 0) = p;
5739 XEXP (note2, 0) = new;
5741 INSN_CODE (new) = INSN_CODE (insn);
5742 return new;
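/* A note on the libcall fixup above (hedged summary): a libcall region
   is delimited by a REG_LIBCALL note on its first insn pointing at the
   last insn, and a matching REG_RETVAL note on the last insn pointing
   back at the first.  When the copied insn carries REG_RETVAL, the
   loop walks back to the insn carrying REG_LIBCALL and repoints both
   notes so the copied region stays well formed.  */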
5745 #include "gt-emit-rtl.h"