1 /* Emit RTL for the GCC expander.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
23 /* Middle-to-low level generation of rtx code and insns.
25 This file contains the functions `gen_rtx', `gen_reg_rtx'
26 and `gen_label_rtx' that are the usual ways of creating rtl
27 expressions for most purposes.
29 It also has the functions for creating insns and linking
30 them in the doubly-linked chain.
32 The patterns of the insns are created by machine-dependent
33 routines in insn-emit.c, which is generated automatically from
34 the machine description. These routines use `gen_rtx' to make
35 the individual rtx's of the pattern; what is machine dependent
36 is the kind of rtx's they make and what arguments they use. */
38 #include "config.h"
39 #include "system.h"
40 #include "coretypes.h"
41 #include "tm.h"
42 #include "toplev.h"
43 #include "rtl.h"
44 #include "tree.h"
45 #include "tm_p.h"
46 #include "flags.h"
47 #include "function.h"
48 #include "expr.h"
49 #include "regs.h"
50 #include "hard-reg-set.h"
51 #include "hashtab.h"
52 #include "insn-config.h"
53 #include "recog.h"
54 #include "real.h"
55 #include "bitmap.h"
56 #include "basic-block.h"
57 #include "ggc.h"
58 #include "debug.h"
59 #include "langhooks.h"
61 /* Commonly used modes. */
63 enum machine_mode byte_mode; /* Mode whose width is BITS_PER_UNIT. */
64 enum machine_mode word_mode; /* Mode whose width is BITS_PER_WORD. */
65 enum machine_mode double_mode; /* Mode whose width is DOUBLE_TYPE_SIZE. */
66 enum machine_mode ptr_mode; /* Mode whose width is POINTER_SIZE. */
69 /* This is *not* reset after each function. It gives each CODE_LABEL
70 in the entire compilation a unique label number. */
72 static GTY(()) int label_num = 1;
74 /* Highest label number in current function.
75 Zero means use the value of label_num instead.
76 This is nonzero only when belatedly compiling an inline function. */
78 static int last_label_num;
80 /* Value label_num had when set_new_first_and_last_label_number was called.
81 If label_num has not changed since then, last_label_num is valid. */
83 static int base_label_num;
85 /* Nonzero means do not generate NOTEs for source line numbers. */
87 static int no_line_numbers;
89 /* Commonly used rtx's, so that we only need space for one copy.
90 These are initialized once for the entire compilation.
91 All of these are unique; no other rtx-object will be equal to any
92 of these. */
94 rtx global_rtl[GR_MAX];
96 /* Commonly used RTL for hard registers. These objects are not necessarily
97 unique, so we allocate them separately from global_rtl. They are
98 initialized once per compilation unit, then copied into regno_reg_rtx
99 at the beginning of each function. */
100 static GTY(()) rtx static_regno_reg_rtx[FIRST_PSEUDO_REGISTER];
102 /* We record floating-point CONST_DOUBLEs in each floating-point mode for
103 the values of 0, 1, and 2. For the integer entries and VOIDmode, we
104 record a copy of const[012]_rtx. */
106 rtx const_tiny_rtx[3][(int) MAX_MACHINE_MODE];
108 rtx const_true_rtx;
110 REAL_VALUE_TYPE dconst0;
111 REAL_VALUE_TYPE dconst1;
112 REAL_VALUE_TYPE dconst2;
113 REAL_VALUE_TYPE dconstm1;
114 REAL_VALUE_TYPE dconstm2;
115 REAL_VALUE_TYPE dconsthalf;
117 /* All references to the following fixed hard registers go through
118 these unique rtl objects. On machines where the frame-pointer and
119 arg-pointer are the same register, they use the same unique object.
121 After register allocation, other rtl objects which used to be pseudo-regs
122 may be clobbered to refer to the frame-pointer register.
123 But references that were originally to the frame-pointer can be
124 distinguished from the others because they contain frame_pointer_rtx.
126 When to use frame_pointer_rtx and hard_frame_pointer_rtx is a little
127 tricky: until register elimination has taken place hard_frame_pointer_rtx
128 should be used if it is being set, and frame_pointer_rtx otherwise. After
129 register elimination hard_frame_pointer_rtx should always be used.
130 On machines where the two registers are the same (most machines), these
131 are identical.
133 In an inline procedure, the stack and frame pointer rtxs may not be
134 used for anything else. */
135 rtx static_chain_rtx; /* (REG:Pmode STATIC_CHAIN_REGNUM) */
136 rtx static_chain_incoming_rtx; /* (REG:Pmode STATIC_CHAIN_INCOMING_REGNUM) */
137 rtx pic_offset_table_rtx; /* (REG:Pmode PIC_OFFSET_TABLE_REGNUM) */
139 /* This is used to implement __builtin_return_address for some machines.
140 See for instance the MIPS port. */
141 rtx return_address_pointer_rtx; /* (REG:Pmode RETURN_ADDRESS_POINTER_REGNUM) */
143 /* We make one copy of (const_int C) where C is in
144 [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
145 to save space during the compilation and simplify comparisons of
146 integers. */
148 rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
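/* Example (illustrative, not from the original sources): because CONST_INTs
   are shared -- small values through this array, larger ones through
   const_int_htab below -- two CONST_INTs are equal exactly when they are
   the same pointer:

       GEN_INT (0) == const0_rtx
       GEN_INT (9999) == GEN_INT (9999)

   both hold as plain pointer comparisons.  */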
150 /* A hash table storing CONST_INTs whose absolute value is greater
151 than MAX_SAVED_CONST_INT. */
153 static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
154 htab_t const_int_htab;
156 /* A hash table storing memory attribute structures. */
157 static GTY ((if_marked ("ggc_marked_p"), param_is (struct mem_attrs)))
158 htab_t mem_attrs_htab;
160 /* A hash table storing register attribute structures. */
161 static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs)))
162 htab_t reg_attrs_htab;
164 /* A hash table storing all CONST_DOUBLEs. */
165 static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
166 htab_t const_double_htab;
168 #define first_insn (cfun->emit->x_first_insn)
169 #define last_insn (cfun->emit->x_last_insn)
170 #define cur_insn_uid (cfun->emit->x_cur_insn_uid)
171 #define last_location (cfun->emit->x_last_location)
172 #define first_label_num (cfun->emit->x_first_label_num)
174 static rtx make_jump_insn_raw (rtx);
175 static rtx make_call_insn_raw (rtx);
176 static rtx find_line_note (rtx);
177 static rtx change_address_1 (rtx, enum machine_mode, rtx, int);
178 static void unshare_all_rtl_1 (rtx);
179 static void unshare_all_decls (tree);
180 static void reset_used_decls (tree);
181 static void mark_label_nuses (rtx);
182 static hashval_t const_int_htab_hash (const void *);
183 static int const_int_htab_eq (const void *, const void *);
184 static hashval_t const_double_htab_hash (const void *);
185 static int const_double_htab_eq (const void *, const void *);
186 static rtx lookup_const_double (rtx);
187 static hashval_t mem_attrs_htab_hash (const void *);
188 static int mem_attrs_htab_eq (const void *, const void *);
189 static mem_attrs *get_mem_attrs (HOST_WIDE_INT, tree, rtx, rtx, unsigned int,
190 enum machine_mode);
191 static hashval_t reg_attrs_htab_hash (const void *);
192 static int reg_attrs_htab_eq (const void *, const void *);
193 static reg_attrs *get_reg_attrs (tree, int);
194 static tree component_ref_for_mem_expr (tree);
195 static rtx gen_const_vector_0 (enum machine_mode);
196 static rtx gen_complex_constant_part (enum machine_mode, rtx, int);
198 /* Probability of the conditional branch currently processed by try_split.
199 Set to -1 otherwise. */
200 int split_branch_probability = -1;
202 /* Returns a hash code for X (which is really a CONST_INT). */
204 static hashval_t
205 const_int_htab_hash (const void *x)
207 return (hashval_t) INTVAL ((rtx) x);
210 /* Returns nonzero if the value represented by X (which is really a
211 CONST_INT) is the same as that given by Y (which is really a
212 HOST_WIDE_INT *). */
214 static int
215 const_int_htab_eq (const void *x, const void *y)
217 return (INTVAL ((rtx) x) == *((const HOST_WIDE_INT *) y));
220 /* Returns a hash code for X (which is really a CONST_DOUBLE). */
221 static hashval_t
222 const_double_htab_hash (const void *x)
224 rtx value = (rtx) x;
225 hashval_t h;
227 if (GET_MODE (value) == VOIDmode)
228 h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
229 else
231 h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
232 /* MODE is used in the comparison, so it should be in the hash. */
233 h ^= GET_MODE (value);
235 return h;
238 /* Returns nonzero if the value represented by X (really a CONST_DOUBLE)
239 is the same as that represented by Y (really a CONST_DOUBLE). */
240 static int
241 const_double_htab_eq (const void *x, const void *y)
243 rtx a = (rtx)x, b = (rtx)y;
245 if (GET_MODE (a) != GET_MODE (b))
246 return 0;
247 if (GET_MODE (a) == VOIDmode)
248 return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
249 && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
250 else
251 return real_identical (CONST_DOUBLE_REAL_VALUE (a),
252 CONST_DOUBLE_REAL_VALUE (b));
255 /* Returns a hash code for X (which is really a mem_attrs *). */
257 static hashval_t
258 mem_attrs_htab_hash (const void *x)
260 mem_attrs *p = (mem_attrs *) x;
262 return (p->alias ^ (p->align * 1000)
263 ^ ((p->offset ? INTVAL (p->offset) : 0) * 50000)
264 ^ ((p->size ? INTVAL (p->size) : 0) * 2500000)
265 ^ (size_t) p->expr);
268 /* Returns nonzero if the value represented by X (which is really a
269 mem_attrs *) is the same as that given by Y (which is also really a
270 mem_attrs *). */
272 static int
273 mem_attrs_htab_eq (const void *x, const void *y)
275 mem_attrs *p = (mem_attrs *) x;
276 mem_attrs *q = (mem_attrs *) y;
278 return (p->alias == q->alias && p->expr == q->expr && p->offset == q->offset
279 && p->size == q->size && p->align == q->align);
282 /* Allocate a new mem_attrs structure and insert it into the hash table if
283 one identical to it is not already in the table. We are doing this for
284 MEM of mode MODE. */
286 static mem_attrs *
287 get_mem_attrs (HOST_WIDE_INT alias, tree expr, rtx offset, rtx size,
288 unsigned int align, enum machine_mode mode)
290 mem_attrs attrs;
291 void **slot;
293 /* If everything is the default, we can just return zero.
294 This must match what the corresponding MEM_* macros return when the
295 field is not present. */
296 if (alias == 0 && expr == 0 && offset == 0
297 && (size == 0
298 || (mode != BLKmode && GET_MODE_SIZE (mode) == INTVAL (size)))
299 && (STRICT_ALIGNMENT && mode != BLKmode
300 ? align == GET_MODE_ALIGNMENT (mode) : align == BITS_PER_UNIT))
301 return 0;
303 attrs.alias = alias;
304 attrs.expr = expr;
305 attrs.offset = offset;
306 attrs.size = size;
307 attrs.align = align;
309 slot = htab_find_slot (mem_attrs_htab, &attrs, INSERT);
310 if (*slot == 0)
312 *slot = ggc_alloc (sizeof (mem_attrs));
313 memcpy (*slot, &attrs, sizeof (mem_attrs));
316 return *slot;
319 /* Returns a hash code for X (which is really a reg_attrs *). */
321 static hashval_t
322 reg_attrs_htab_hash (const void *x)
324 reg_attrs *p = (reg_attrs *) x;
326 return ((p->offset * 1000) ^ (long) p->decl);
329 /* Returns nonzero if the value represented by X (which is really a
330 reg_attrs *) is the same as that given by Y (which is also really a
331 reg_attrs *). */
333 static int
334 reg_attrs_htab_eq (const void *x, const void *y)
336 reg_attrs *p = (reg_attrs *) x;
337 reg_attrs *q = (reg_attrs *) y;
339 return (p->decl == q->decl && p->offset == q->offset);
341 /* Allocate a new reg_attrs structure and insert it into the hash table if
342 one identical to it is not already in the table. We are doing this for
343 a REG whose decl is DECL and whose offset is OFFSET. */
345 static reg_attrs *
346 get_reg_attrs (tree decl, int offset)
348 reg_attrs attrs;
349 void **slot;
351 /* If everything is the default, we can just return zero. */
352 if (decl == 0 && offset == 0)
353 return 0;
355 attrs.decl = decl;
356 attrs.offset = offset;
358 slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
359 if (*slot == 0)
361 *slot = ggc_alloc (sizeof (reg_attrs));
362 memcpy (*slot, &attrs, sizeof (reg_attrs));
365 return *slot;
368 /* Generate a new REG rtx. Make sure ORIGINAL_REGNO is set properly, and
369 don't attempt to share with the various global pieces of rtl (such as
370 frame_pointer_rtx). */
372 rtx
373 gen_raw_REG (enum machine_mode mode, int regno)
375 rtx x = gen_rtx_raw_REG (mode, regno);
376 ORIGINAL_REGNO (x) = regno;
377 return x;
380 /* There are some RTL codes that require special attention; the generation
381 functions do the raw handling. If you add to this list, modify
382 special_rtx in gengenrtl.c as well. */
384 rtx
385 gen_rtx_CONST_INT (enum machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
387 void **slot;
389 if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
390 return const_int_rtx[arg + MAX_SAVED_CONST_INT];
392 #if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
393 if (const_true_rtx && arg == STORE_FLAG_VALUE)
394 return const_true_rtx;
395 #endif
397 /* Look up the CONST_INT in the hash table. */
398 slot = htab_find_slot_with_hash (const_int_htab, &arg,
399 (hashval_t) arg, INSERT);
400 if (*slot == 0)
401 *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);
403 return (rtx) *slot;
406 rtx
407 gen_int_mode (HOST_WIDE_INT c, enum machine_mode mode)
409 return GEN_INT (trunc_int_for_mode (c, mode));
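/* Example (illustrative): gen_int_mode (0xff, QImode) truncates 0xff to the
   8 bits of QImode and sign-extends, yielding (const_int -1), i.e.
   constm1_rtx, the canonical QImode form -- whereas a bare GEN_INT (0xff)
   would produce a CONST_INT that is not a valid QImode value.  */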
412 /* CONST_DOUBLEs might be created from pairs of integers, or from
413 REAL_VALUE_TYPEs. Also, their length is known only at run time,
414 so we cannot use gen_rtx_raw_CONST_DOUBLE. */
416 /* Determine whether REAL, a CONST_DOUBLE, already exists in the
417 hash table. If so, return its counterpart; otherwise add it
418 to the hash table and return it. */
419 static rtx
420 lookup_const_double (rtx real)
422 void **slot = htab_find_slot (const_double_htab, real, INSERT);
423 if (*slot == 0)
424 *slot = real;
426 return (rtx) *slot;
429 /* Return a CONST_DOUBLE rtx for a floating-point value specified by
430 VALUE in mode MODE. */
431 rtx
432 const_double_from_real_value (REAL_VALUE_TYPE value, enum machine_mode mode)
434 rtx real = rtx_alloc (CONST_DOUBLE);
435 PUT_MODE (real, mode);
437 memcpy (&CONST_DOUBLE_LOW (real), &value, sizeof (REAL_VALUE_TYPE));
439 return lookup_const_double (real);
442 /* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
443 of ints: I0 is the low-order word and I1 is the high-order word.
444 Do not use this routine for non-integer modes; convert to
445 REAL_VALUE_TYPE and use CONST_DOUBLE_FROM_REAL_VALUE. */
447 rtx
448 immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, enum machine_mode mode)
450 rtx value;
451 unsigned int i;
453 if (mode != VOIDmode)
455 int width;
456 if (GET_MODE_CLASS (mode) != MODE_INT
457 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT
458 /* We can get a 0 for an error mark. */
459 && GET_MODE_CLASS (mode) != MODE_VECTOR_INT
460 && GET_MODE_CLASS (mode) != MODE_VECTOR_FLOAT)
461 abort ();
463 /* We clear out all bits that don't belong in MODE, unless they and
464 our sign bit are all one. So we get either a reasonable negative
465 value or a reasonable unsigned value for this mode. */
466 width = GET_MODE_BITSIZE (mode);
467 if (width < HOST_BITS_PER_WIDE_INT
468 && ((i0 & ((HOST_WIDE_INT) (-1) << (width - 1)))
469 != ((HOST_WIDE_INT) (-1) << (width - 1))))
470 i0 &= ((HOST_WIDE_INT) 1 << width) - 1, i1 = 0;
471 else if (width == HOST_BITS_PER_WIDE_INT
472 && ! (i1 == ~0 && i0 < 0))
473 i1 = 0;
474 else if (width > 2 * HOST_BITS_PER_WIDE_INT)
475 /* We cannot represent this value as a constant. */
476 abort ();
478 /* If this would be an entire word for the target, but is not for
479 the host, then sign-extend on the host so that the number will
480 look the same way on the host that it would on the target.
482 For example, when building a 32-bit sparc-targeted compiler hosted on
483 a 64-bit alpha, we want the 32-bit unsigned value -1 to be
484 represented as a 64-bit value -1, and not as 0x00000000ffffffff.
485 The latter confuses the sparc backend. */
487 if (width < HOST_BITS_PER_WIDE_INT
488 && (i0 & ((HOST_WIDE_INT) 1 << (width - 1))))
489 i0 |= ((HOST_WIDE_INT) (-1) << width);
491 /* If MODE fits within HOST_BITS_PER_WIDE_INT, always use a
492 CONST_INT.
494 ??? Strictly speaking, this is wrong if we create a CONST_INT for
495 a large unsigned constant with the size of MODE being
496 HOST_BITS_PER_WIDE_INT and later try to interpret that constant
497 in a wider mode. In that case we will mis-interpret it as a
498 negative number.
500 Unfortunately, the only alternative is to make a CONST_DOUBLE for
501 any constant in any mode if it is an unsigned constant larger
502 than the maximum signed integer in an int on the host. However,
503 doing this will break everyone that always expects to see a
504 CONST_INT for SImode and smaller.
506 We have always been making CONST_INTs in this case, so nothing
507 new is being broken. */
509 if (width <= HOST_BITS_PER_WIDE_INT)
510 i1 = (i0 < 0) ? ~(HOST_WIDE_INT) 0 : 0;
513 /* If this integer fits in one word, return a CONST_INT. */
514 if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
515 return GEN_INT (i0);
517 /* We use VOIDmode for integers. */
518 value = rtx_alloc (CONST_DOUBLE);
519 PUT_MODE (value, VOIDmode);
521 CONST_DOUBLE_LOW (value) = i0;
522 CONST_DOUBLE_HIGH (value) = i1;
524 for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
525 XWINT (value, i) = 0;
527 return lookup_const_double (value);
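/* Example (illustrative): on a host with a 64-bit HOST_WIDE_INT,
   immed_double_const (-1, -1, DImode) folds to the shared CONST_INT
   constm1_rtx, since DImode then fits in a single HOST_WIDE_INT, while
   immed_double_const (0, 1, TImode) must build a VOIDmode CONST_DOUBLE
   with low word 0 and high word 1.  */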
530 rtx
531 gen_rtx_REG (enum machine_mode mode, unsigned int regno)
533 /* In case the MD file explicitly references the frame pointer, have
534 all such references point to the same frame pointer. This is
535 used during frame pointer elimination to distinguish the explicit
536 references to these registers from pseudos that happened to be
537 assigned to them.
539 If we have eliminated the frame pointer or arg pointer, we will
540 be using it as a normal register, for example as a spill
541 register. In such cases, we might be accessing it in a mode that
542 is not Pmode and therefore cannot use the pre-allocated rtx.
544 Also don't do this when we are making new REGs in reload, since
545 we don't want to get confused with the real pointers. */
547 if (mode == Pmode && !reload_in_progress)
549 if (regno == FRAME_POINTER_REGNUM
550 && (!reload_completed || frame_pointer_needed))
551 return frame_pointer_rtx;
552 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
553 if (regno == HARD_FRAME_POINTER_REGNUM
554 && (!reload_completed || frame_pointer_needed))
555 return hard_frame_pointer_rtx;
556 #endif
557 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && HARD_FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
558 if (regno == ARG_POINTER_REGNUM)
559 return arg_pointer_rtx;
560 #endif
561 #ifdef RETURN_ADDRESS_POINTER_REGNUM
562 if (regno == RETURN_ADDRESS_POINTER_REGNUM)
563 return return_address_pointer_rtx;
564 #endif
565 if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
566 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
567 return pic_offset_table_rtx;
568 if (regno == STACK_POINTER_REGNUM)
569 return stack_pointer_rtx;
572 #if 0
573 /* If the per-function register table has been set up, try to re-use
574 an existing entry in that table to avoid useless generation of RTL.
576 This code is disabled for now until we can fix the various backends
577 which depend on having non-shared hard registers in some cases. Long
578 term we want to re-enable this code as it can significantly cut down
579 on the amount of useless RTL that gets generated.
581 We'll also need to fix some code that runs after reload that wants to
582 set ORIGINAL_REGNO. */
584 if (cfun
585 && cfun->emit
586 && regno_reg_rtx
587 && regno < FIRST_PSEUDO_REGISTER
588 && reg_raw_mode[regno] == mode)
589 return regno_reg_rtx[regno];
590 #endif
592 return gen_raw_REG (mode, regno);
595 rtx
596 gen_rtx_MEM (enum machine_mode mode, rtx addr)
598 rtx rt = gen_rtx_raw_MEM (mode, addr);
600 /* This field is not cleared by the mere allocation of the rtx, so
601 we clear it here. */
602 MEM_ATTRS (rt) = 0;
604 return rt;
607 rtx
608 gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset)
610 /* This is the most common failure type.
611 Catch it early so we can see who does it. */
612 if ((offset % GET_MODE_SIZE (mode)) != 0)
613 abort ();
615 /* This check isn't usable right now because combine will
616 throw arbitrary crap like a CALL into a SUBREG in
617 gen_lowpart_for_combine so we must just eat it. */
618 #if 0
619 /* Check for this too. */
620 if (offset >= GET_MODE_SIZE (GET_MODE (reg)))
621 abort ();
622 #endif
623 return gen_rtx_raw_SUBREG (mode, reg, offset);
626 /* Generate a SUBREG representing the least-significant part of REG if MODE
627 is smaller than mode of REG, otherwise paradoxical SUBREG. */
629 rtx
630 gen_lowpart_SUBREG (enum machine_mode mode, rtx reg)
632 enum machine_mode inmode;
634 inmode = GET_MODE (reg);
635 if (inmode == VOIDmode)
636 inmode = mode;
637 return gen_rtx_SUBREG (mode, reg,
638 subreg_lowpart_offset (mode, inmode));
641 /* rtx gen_rtx (code, mode, [element1, ..., elementn])
643 ** This routine generates an RTX of the size specified by
644 ** <code>, which is an RTX code. The RTX structure is initialized
645 ** from the arguments <element1> through <elementn>, which are
646 ** interpreted according to the specific RTX type's format. The
647 ** special machine mode associated with the rtx (if any) is specified
648 ** in <mode>.
650 ** gen_rtx can be invoked in a way which resembles the lisp-like
651 ** rtx it will generate. For example, the following rtx structure:
653 ** (plus:QI (mem:QI (reg:SI 1))
654 ** (mem:QI (plus:SI (reg:SI 2) (reg:SI 3))))
656 ** ...would be generated by the following C code:
658 ** gen_rtx (PLUS, QImode,
659 ** gen_rtx (MEM, QImode,
660 ** gen_rtx (REG, SImode, 1)),
661 ** gen_rtx (MEM, QImode,
662 ** gen_rtx (PLUS, SImode,
663 ** gen_rtx (REG, SImode, 2),
664 ** gen_rtx (REG, SImode, 3)))),
665 */
667 /*VARARGS2*/
668 rtx
669 gen_rtx (enum rtx_code code, enum machine_mode mode, ...)
671 int i; /* Array indices... */
672 const char *fmt; /* Current rtx's format... */
673 rtx rt_val; /* RTX to return to caller... */
674 va_list p;
676 va_start (p, mode);
678 switch (code)
680 case CONST_INT:
681 rt_val = gen_rtx_CONST_INT (mode, va_arg (p, HOST_WIDE_INT));
682 break;
684 case CONST_DOUBLE:
686 HOST_WIDE_INT arg0 = va_arg (p, HOST_WIDE_INT);
687 HOST_WIDE_INT arg1 = va_arg (p, HOST_WIDE_INT);
689 rt_val = immed_double_const (arg0, arg1, mode);
691 break;
693 case REG:
694 rt_val = gen_rtx_REG (mode, va_arg (p, int));
695 break;
697 case MEM:
698 rt_val = gen_rtx_MEM (mode, va_arg (p, rtx));
699 break;
701 default:
702 rt_val = rtx_alloc (code); /* Allocate the storage space. */
703 rt_val->mode = mode; /* Store the machine mode... */
705 fmt = GET_RTX_FORMAT (code); /* Find the right format... */
706 for (i = 0; i < GET_RTX_LENGTH (code); i++)
708 switch (*fmt++)
710 case '0': /* Field with unknown use. Zero it. */
711 X0EXP (rt_val, i) = NULL_RTX;
712 break;
714 case 'i': /* An integer? */
715 XINT (rt_val, i) = va_arg (p, int);
716 break;
718 case 'w': /* A wide integer? */
719 XWINT (rt_val, i) = va_arg (p, HOST_WIDE_INT);
720 break;
722 case 's': /* A string? */
723 XSTR (rt_val, i) = va_arg (p, char *);
724 break;
726 case 'e': /* An expression? */
727 case 'u': /* An insn? Same except when printing. */
728 XEXP (rt_val, i) = va_arg (p, rtx);
729 break;
731 case 'E': /* An RTX vector? */
732 XVEC (rt_val, i) = va_arg (p, rtvec);
733 break;
735 case 'b': /* A bitmap? */
736 XBITMAP (rt_val, i) = va_arg (p, bitmap);
737 break;
739 case 't': /* A tree? */
740 XTREE (rt_val, i) = va_arg (p, tree);
741 break;
743 default:
744 abort ();
747 break;
750 va_end (p);
751 return rt_val;
754 /* gen_rtvec (n, [rt1, ..., rtn])
756 ** This routine creates an rtvec and stores within it the
757 ** pointers to rtx's which are its arguments.
758 */
760 /*VARARGS1*/
761 rtvec
762 gen_rtvec (int n, ...)
764 int i, save_n;
765 rtx *vector;
766 va_list p;
768 va_start (p, n);
770 if (n == 0)
771 return NULL_RTVEC; /* Don't allocate an empty rtvec... */
773 vector = alloca (n * sizeof (rtx));
775 for (i = 0; i < n; i++)
776 vector[i] = va_arg (p, rtx);
778 /* The definition of VA_* in K&R C causes `n' to go out of scope. */
779 save_n = n;
780 va_end (p);
782 return gen_rtvec_v (save_n, vector);
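/* Usage sketch (illustrative): gen_rtvec (2, set0, set1) builds the
   two-element vector [set0 set1], as needed e.g. for the body of a
   PARALLEL: gen_rtx (PARALLEL, VOIDmode, gen_rtvec (2, set0, set1)).  */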
785 rtvec
786 gen_rtvec_v (int n, rtx *argp)
788 int i;
789 rtvec rt_val;
791 if (n == 0)
792 return NULL_RTVEC; /* Don't allocate an empty rtvec... */
794 rt_val = rtvec_alloc (n); /* Allocate an rtvec... */
796 for (i = 0; i < n; i++)
797 rt_val->elem[i] = *argp++;
799 return rt_val;
802 /* Generate a REG rtx for a new pseudo register of mode MODE.
803 This pseudo is assigned the next sequential register number. */
805 rtx
806 gen_reg_rtx (enum machine_mode mode)
808 struct function *f = cfun;
809 rtx val;
811 /* Don't let anything called after initial flow analysis create new
812 registers. */
813 if (no_new_pseudos)
814 abort ();
816 if (generating_concat_p
817 && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
818 || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
820 /* For complex modes, don't make a single pseudo.
821 Instead, make a CONCAT of two pseudos.
822 This allows noncontiguous allocation of the real and imaginary parts,
823 which makes much better code. Besides, allocating DCmode
824 pseudos overstrains reload on some machines like the 386. */
825 rtx realpart, imagpart;
826 enum machine_mode partmode = GET_MODE_INNER (mode);
828 realpart = gen_reg_rtx (partmode);
829 imagpart = gen_reg_rtx (partmode);
830 return gen_rtx_CONCAT (mode, realpart, imagpart);
833 /* Make sure regno_pointer_align and regno_reg_rtx are large
834 enough to have an element for this pseudo reg number. */
836 if (reg_rtx_no == f->emit->regno_pointer_align_length)
838 int old_size = f->emit->regno_pointer_align_length;
839 char *new;
840 rtx *new1;
842 new = ggc_realloc (f->emit->regno_pointer_align, old_size * 2);
843 memset (new + old_size, 0, old_size);
844 f->emit->regno_pointer_align = (unsigned char *) new;
846 new1 = ggc_realloc (f->emit->x_regno_reg_rtx,
847 old_size * 2 * sizeof (rtx));
848 memset (new1 + old_size, 0, old_size * sizeof (rtx));
849 regno_reg_rtx = new1;
851 f->emit->regno_pointer_align_length = old_size * 2;
854 val = gen_raw_REG (mode, reg_rtx_no);
855 regno_reg_rtx[reg_rtx_no++] = val;
856 return val;
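/* Usage sketch (illustrative): a typical expander fragment grabs a fresh
   pseudo and moves a value into it:

       rtx tmp = gen_reg_rtx (SImode);
       emit_move_insn (tmp, src);

   For a complex mode such as DCmode the value returned above is a CONCAT
   of two pseudos rather than a single REG, as described in the comment
   inside the function.  */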
859 /* Generate a register with the same attributes as REG, but with an
860 offset of OFFSET. */
862 rtx
863 gen_rtx_REG_offset (rtx reg, enum machine_mode mode, unsigned int regno, int offset)
865 rtx new = gen_rtx_REG (mode, regno);
866 REG_ATTRS (new) = get_reg_attrs (REG_EXPR (reg),
867 REG_OFFSET (reg) + offset);
868 return new;
871 /* Set the register attributes of REG from the memory attributes of MEM. */
873 void
874 set_reg_attrs_from_mem (rtx reg, rtx mem)
876 if (MEM_OFFSET (mem) && GET_CODE (MEM_OFFSET (mem)) == CONST_INT)
877 REG_ATTRS (reg)
878 = get_reg_attrs (MEM_EXPR (mem), INTVAL (MEM_OFFSET (mem)));
881 /* Set the register attributes for registers contained in PARM_RTX.
882 Use needed values from memory attributes of MEM. */
884 void
885 set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
887 if (GET_CODE (parm_rtx) == REG)
888 set_reg_attrs_from_mem (parm_rtx, mem);
889 else if (GET_CODE (parm_rtx) == PARALLEL)
891 /* Check for a NULL entry in the first slot, used to indicate that the
892 parameter goes both on the stack and in registers. */
893 int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
894 for (; i < XVECLEN (parm_rtx, 0); i++)
896 rtx x = XVECEXP (parm_rtx, 0, i);
897 if (GET_CODE (XEXP (x, 0)) == REG)
898 REG_ATTRS (XEXP (x, 0))
899 = get_reg_attrs (MEM_EXPR (mem),
900 INTVAL (XEXP (x, 1)));
905 /* Assign the RTX X to declaration T. */
906 void
907 set_decl_rtl (tree t, rtx x)
909 DECL_CHECK (t)->decl.rtl = x;
911 if (!x)
912 return;
913 /* For register, we maintain the reverse information too. */
914 if (GET_CODE (x) == REG)
915 REG_ATTRS (x) = get_reg_attrs (t, 0);
916 else if (GET_CODE (x) == SUBREG)
917 REG_ATTRS (SUBREG_REG (x))
918 = get_reg_attrs (t, -SUBREG_BYTE (x));
919 if (GET_CODE (x) == CONCAT)
921 if (REG_P (XEXP (x, 0)))
922 REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
923 if (REG_P (XEXP (x, 1)))
924 REG_ATTRS (XEXP (x, 1))
925 = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
927 if (GET_CODE (x) == PARALLEL)
929 int i;
930 for (i = 0; i < XVECLEN (x, 0); i++)
932 rtx y = XVECEXP (x, 0, i);
933 if (REG_P (XEXP (y, 0)))
934 REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
939 /* Identify REG (which may be a CONCAT) as a user register. */
941 void
942 mark_user_reg (rtx reg)
944 if (GET_CODE (reg) == CONCAT)
946 REG_USERVAR_P (XEXP (reg, 0)) = 1;
947 REG_USERVAR_P (XEXP (reg, 1)) = 1;
949 else if (GET_CODE (reg) == REG)
950 REG_USERVAR_P (reg) = 1;
951 else
952 abort ();
955 /* Identify REG as a probable pointer register and show its alignment
956 as ALIGN, if nonzero. */
958 void
959 mark_reg_pointer (rtx reg, int align)
961 if (! REG_POINTER (reg))
963 REG_POINTER (reg) = 1;
965 if (align)
966 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
968 else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
969 /* We can no longer be sure just how aligned this pointer is. */
970 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
973 /* Return 1 plus largest pseudo reg number used in the current function. */
975 int
976 max_reg_num (void)
978 return reg_rtx_no;
981 /* Return 1 + the largest label number used so far in the current function. */
983 int
984 max_label_num (void)
986 if (last_label_num && label_num == base_label_num)
987 return last_label_num;
988 return label_num;
991 /* Return first label number used in this function (if any were used). */
993 int
994 get_first_label_num (void)
996 return first_label_num;
999 /* Return the final regno of X, which is a SUBREG of a hard
1000 register. */
1001 unsigned int
1002 subreg_hard_regno (rtx x, int check_mode)
1004 enum machine_mode mode = GET_MODE (x);
1005 unsigned int byte_offset, base_regno, final_regno;
1006 rtx reg = SUBREG_REG (x);
1008 /* This is where we attempt to catch illegal subregs
1009 created by the compiler. */
1010 if (GET_CODE (x) != SUBREG
1011 || GET_CODE (reg) != REG)
1012 abort ();
1013 base_regno = REGNO (reg);
1014 if (base_regno >= FIRST_PSEUDO_REGISTER)
1015 abort ();
1016 if (check_mode && ! HARD_REGNO_MODE_OK (base_regno, GET_MODE (reg)))
1017 abort ();
1018 #ifdef ENABLE_CHECKING
1019 if (!subreg_offset_representable_p (REGNO (reg), GET_MODE (reg),
1020 SUBREG_BYTE (x), mode))
1021 abort ();
1022 #endif
1023 /* Catch non-congruent offsets too. */
1024 byte_offset = SUBREG_BYTE (x);
1025 if ((byte_offset % GET_MODE_SIZE (mode)) != 0)
1026 abort ();
1028 final_regno = subreg_regno (x);
1030 return final_regno;
1033 /* Return a value representing some low-order bits of X, where the number
1034 of low-order bits is given by MODE. Note that no conversion is done
1035 between floating-point and fixed-point values, rather, the bit
1036 representation is returned.
1038 This function handles the cases in common between gen_lowpart, below,
1039 and two variants in cse.c and combine.c. These are the cases that can
1040 be safely handled at all points in the compilation.
1042 If this is not a case we can handle, return 0. */
1044 rtx
1045 gen_lowpart_common (enum machine_mode mode, rtx x)
1047 int msize = GET_MODE_SIZE (mode);
1048 int xsize = GET_MODE_SIZE (GET_MODE (x));
1049 int offset = 0;
1051 if (GET_MODE (x) == mode)
1052 return x;
1054 /* MODE must occupy no more words than the mode of X. */
1055 if (GET_MODE (x) != VOIDmode
1056 && ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
1057 > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
1058 return 0;
1060 /* Don't allow generating paradoxical FLOAT_MODE subregs. */
1061 if (GET_MODE_CLASS (mode) == MODE_FLOAT
1062 && GET_MODE (x) != VOIDmode && msize > xsize)
1063 return 0;
1065 offset = subreg_lowpart_offset (mode, GET_MODE (x));
1067 if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
1068 && (GET_MODE_CLASS (mode) == MODE_INT
1069 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
1071 /* If we are getting the low-order part of something that has been
1072 sign- or zero-extended, we can either just use the object being
1073 extended or make a narrower extension. If we want an even smaller
1074 piece than the size of the object being extended, call ourselves
1075 recursively.
1077 This case is used mostly by combine and cse. */
1079 if (GET_MODE (XEXP (x, 0)) == mode)
1080 return XEXP (x, 0);
1081 else if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
1082 return gen_lowpart_common (mode, XEXP (x, 0));
1083 else if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (x)))
1084 return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
1086 else if (GET_CODE (x) == SUBREG || GET_CODE (x) == REG
1087 || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR)
1088 return simplify_gen_subreg (mode, x, GET_MODE (x), offset);
1089 else if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
1090 return simplify_gen_subreg (mode, x, int_mode_for_mode (mode), offset);
1091 /* If X is a CONST_INT or a CONST_DOUBLE, extract the appropriate bits
1092 from the low-order part of the constant. */
1093 else if ((GET_MODE_CLASS (mode) == MODE_INT
1094 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
1095 && GET_MODE (x) == VOIDmode
1096 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
1098 /* If MODE is twice the host word size, X is already the desired
1099 representation. Otherwise, if MODE is wider than a word, we can't
1100 do this. If MODE is exactly a word, return just one CONST_INT. */
1102 if (GET_MODE_BITSIZE (mode) >= 2 * HOST_BITS_PER_WIDE_INT)
1103 return x;
1104 else if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
1105 return 0;
1106 else if (GET_MODE_BITSIZE (mode) == HOST_BITS_PER_WIDE_INT)
1107 return (GET_CODE (x) == CONST_INT ? x
1108 : GEN_INT (CONST_DOUBLE_LOW (x)));
1109 else
1111 /* MODE must be narrower than HOST_BITS_PER_WIDE_INT. */
1112 HOST_WIDE_INT val = (GET_CODE (x) == CONST_INT ? INTVAL (x)
1113 : CONST_DOUBLE_LOW (x));
1115 /* Sign extend to HOST_WIDE_INT. */
1116 val = trunc_int_for_mode (val, mode);
1118 return (GET_CODE (x) == CONST_INT && INTVAL (x) == val ? x
1119 : GEN_INT (val));
1123 /* The floating-point emulator can handle all conversions between
1124 FP and integer operands. This simplifies reload because it
1125 doesn't have to deal with constructs like (subreg:DI
1126 (const_double:SF ...)) or (subreg:DF (const_int ...)). */
1127 /* Single-precision floats are always 32 bits and double-precision
1128 floats are always 64 bits. */
1130 else if (GET_MODE_CLASS (mode) == MODE_FLOAT
1131 && GET_MODE_BITSIZE (mode) == 32
1132 && GET_CODE (x) == CONST_INT)
1134 REAL_VALUE_TYPE r;
1135 long i = INTVAL (x);
1137 real_from_target (&r, &i, mode);
1138 return CONST_DOUBLE_FROM_REAL_VALUE (r, mode);
1140 else if (GET_MODE_CLASS (mode) == MODE_FLOAT
1141 && GET_MODE_BITSIZE (mode) == 64
1142 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE)
1143 && GET_MODE (x) == VOIDmode)
1145 REAL_VALUE_TYPE r;
1146 HOST_WIDE_INT low, high;
1147 long i[2];
1149 if (GET_CODE (x) == CONST_INT)
1151 low = INTVAL (x);
1152 high = low >> (HOST_BITS_PER_WIDE_INT - 1);
1154 else
1156 low = CONST_DOUBLE_LOW (x);
1157 high = CONST_DOUBLE_HIGH (x);
1160 if (HOST_BITS_PER_WIDE_INT > 32)
1161 high = low >> 31 >> 1;
1163 /* REAL_VALUE_TARGET_DOUBLE takes the addressing order of the
1164 target machine. */
1165 if (WORDS_BIG_ENDIAN)
1166 i[0] = high, i[1] = low;
1167 else
1168 i[0] = low, i[1] = high;
1170 real_from_target (&r, i, mode);
1171 return CONST_DOUBLE_FROM_REAL_VALUE (r, mode);
1173 else if ((GET_MODE_CLASS (mode) == MODE_INT
1174 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
1175 && GET_CODE (x) == CONST_DOUBLE
1176 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
1178 REAL_VALUE_TYPE r;
1179 long i[4]; /* Only the low 32 bits of each 'long' are used. */
1180 int endian = WORDS_BIG_ENDIAN ? 1 : 0;
1182 /* Convert 'r' into an array of four 32-bit words in target word
1183 order. */
1184 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
1185 switch (GET_MODE_BITSIZE (GET_MODE (x)))
1187 case 32:
1188 REAL_VALUE_TO_TARGET_SINGLE (r, i[3 * endian]);
1189 i[1] = 0;
1190 i[2] = 0;
1191 i[3 - 3 * endian] = 0;
1192 break;
1193 case 64:
1194 REAL_VALUE_TO_TARGET_DOUBLE (r, i + 2 * endian);
1195 i[2 - 2 * endian] = 0;
1196 i[3 - 2 * endian] = 0;
1197 break;
1198 case 96:
1199 REAL_VALUE_TO_TARGET_LONG_DOUBLE (r, i + endian);
1200 i[3 - 3 * endian] = 0;
1201 break;
1202 case 128:
1203 REAL_VALUE_TO_TARGET_LONG_DOUBLE (r, i);
1204 break;
1205 default:
1206 abort ();
1208 /* Now, pack the 32-bit elements of the array into a CONST_DOUBLE
1209 and return it. */
1210 #if HOST_BITS_PER_WIDE_INT == 32
1211 return immed_double_const (i[3 * endian], i[1 + endian], mode);
1212 #else
1213 if (HOST_BITS_PER_WIDE_INT != 64)
1214 abort ();
1216 return immed_double_const ((((unsigned long) i[3 * endian])
1217 | ((HOST_WIDE_INT) i[1 + endian] << 32)),
1218 (((unsigned long) i[2 - endian])
1219 | ((HOST_WIDE_INT) i[3 - 3 * endian] << 32)),
1220 mode);
1221 #endif
1223 /* If MODE is a condition code and X is a CONST_INT, the value of X
1224 must already have been "recognized" by the back-end, and we can
1225 assume that it is valid for this mode. */
1226 else if (GET_MODE_CLASS (mode) == MODE_CC
1227 && GET_CODE (x) == CONST_INT)
1228 return x;
1230 /* Otherwise, we can't do this. */
1231 return 0;
1234 /* Return the constant real or imaginary part (which has mode MODE)
1235 of a complex value X. The IMAGPART_P argument determines whether
1236 the real or imaginary component should be returned. This function
1237 returns NULL_RTX if the component isn't a constant. */
1239 static rtx
1240 gen_complex_constant_part (enum machine_mode mode, rtx x, int imagpart_p)
1242 tree decl, part;
1244 if (GET_CODE (x) == MEM
1245 && GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
1247 decl = SYMBOL_REF_DECL (XEXP (x, 0));
1248 if (decl != NULL_TREE && TREE_CODE (decl) == COMPLEX_CST)
1250 part = imagpart_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
1251 if (TREE_CODE (part) == REAL_CST
1252 || TREE_CODE (part) == INTEGER_CST)
1253 return expand_expr (part, NULL_RTX, mode, 0);
1256 return NULL_RTX;
1259 /* Return the real part (which has mode MODE) of a complex value X.
1260 This always comes at the low address in memory. */
1262 rtx
1263 gen_realpart (enum machine_mode mode, rtx x)
1265 rtx part;
1267 /* Handle complex constants. */
1268 part = gen_complex_constant_part (mode, x, 0);
1269 if (part != NULL_RTX)
1270 return part;
1272 if (WORDS_BIG_ENDIAN
1273 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
1274 && REG_P (x)
1275 && REGNO (x) < FIRST_PSEUDO_REGISTER)
1276 internal_error
1277 ("can't access real part of complex value in hard register");
1278 else if (WORDS_BIG_ENDIAN)
1279 return gen_highpart (mode, x);
1280 else
1281 return gen_lowpart (mode, x);
1284 /* Return the imaginary part (which has mode MODE) of a complex value X.
1285 This always comes at the high address in memory. */
1287 rtx
1288 gen_imagpart (enum machine_mode mode, rtx x)
1290 rtx part;
1292 /* Handle complex constants. */
1293 part = gen_complex_constant_part (mode, x, 1);
1294 if (part != NULL_RTX)
1295 return part;
1297 if (WORDS_BIG_ENDIAN)
1298 return gen_lowpart (mode, x);
1299 else if (! WORDS_BIG_ENDIAN
1300 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
1301 && REG_P (x)
1302 && REGNO (x) < FIRST_PSEUDO_REGISTER)
1303 internal_error
1304 ("can't access imaginary part of complex value in hard register");
1305 else
1306 return gen_highpart (mode, x);
1309 /* Return 1 iff X, assumed to be a SUBREG,
1310 refers to the real part of the complex value in its containing reg.
1311 Complex values are always stored with the real part in the first word,
1312 regardless of WORDS_BIG_ENDIAN. */
1314 int
1315 subreg_realpart_p (rtx x)
1317 if (GET_CODE (x) != SUBREG)
1318 abort ();
1320 return ((unsigned int) SUBREG_BYTE (x)
1321 < GET_MODE_UNIT_SIZE (GET_MODE (SUBREG_REG (x))));
1324 /* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a value,
1325 return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
1326 least-significant part of X.
1327 MODE specifies how big a part of X to return;
1328 it usually should not be larger than a word.
1329 If X is a MEM whose address is a QUEUED, the value may be so also. */
1331 rtx
1332 gen_lowpart (enum machine_mode mode, rtx x)
1334 rtx result = gen_lowpart_common (mode, x);
1336 if (result)
1337 return result;
1338 else if (GET_CODE (x) == REG)
1340 /* Must be a hard reg that's not valid in MODE. */
1341 result = gen_lowpart_common (mode, copy_to_reg (x));
1342 if (result == 0)
1343 abort ();
1344 return result;
1346 else if (GET_CODE (x) == MEM)
1348 /* The only additional case we can do is MEM. */
1349 int offset = 0;
1351 /* The following exposes the use of "x" to CSE. */
1352 if (GET_MODE_SIZE (GET_MODE (x)) <= UNITS_PER_WORD
1353 && SCALAR_INT_MODE_P (GET_MODE (x))
1354 && ! no_new_pseudos)
1355 return gen_lowpart (mode, force_reg (GET_MODE (x), x));
1357 if (WORDS_BIG_ENDIAN)
1358 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
1359 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
1361 if (BYTES_BIG_ENDIAN)
1362 /* Adjust the address so that the address-after-the-data
1363 is unchanged. */
1364 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
1365 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
1367 return adjust_address (x, mode, offset);
1369 else if (GET_CODE (x) == ADDRESSOF)
1370 return gen_lowpart (mode, force_reg (GET_MODE (x), x));
1371 else
1372 abort ();
1375 /* Like `gen_lowpart', but refer to the most significant part.
1376 This is used to access the imaginary part of a complex number. */
1378 rtx
1379 gen_highpart (enum machine_mode mode, rtx x)
1381 unsigned int msize = GET_MODE_SIZE (mode);
1382 rtx result;
1384 /* This case loses if X is a subreg. To catch bugs early,
1385 complain if an invalid MODE is used even in other cases. */
1386 if (msize > UNITS_PER_WORD
1387 && msize != GET_MODE_UNIT_SIZE (GET_MODE (x)))
1388 abort ();
1390 result = simplify_gen_subreg (mode, x, GET_MODE (x),
1391 subreg_highpart_offset (mode, GET_MODE (x)));
1393 /* simplify_gen_subreg is not guaranteed to return a valid operand for
1394 the target if we have a MEM. gen_highpart must return a valid operand,
1395 emitting code if necessary to do so. */
1396 if (result != NULL_RTX && GET_CODE (result) == MEM)
1397 result = validize_mem (result);
1399 if (!result)
1400 abort ();
1401 return result;
1404 /* Like gen_highpart, but accept the mode of EXP explicitly (INNERMODE),
1405 since EXP may be a VOIDmode constant. */
1406 rtx
1407 gen_highpart_mode (enum machine_mode outermode, enum machine_mode innermode, rtx exp)
1409 if (GET_MODE (exp) != VOIDmode)
1411 if (GET_MODE (exp) != innermode)
1412 abort ();
1413 return gen_highpart (outermode, exp);
1415 return simplify_gen_subreg (outermode, exp, innermode,
1416 subreg_highpart_offset (outermode, innermode));
1419 /* Return offset in bytes to get OUTERMODE low part
1420 of the value in mode INNERMODE stored in memory in target format. */
1422 unsigned int
1423 subreg_lowpart_offset (enum machine_mode outermode, enum machine_mode innermode)
1425 unsigned int offset = 0;
1426 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1428 if (difference > 0)
1430 if (WORDS_BIG_ENDIAN)
1431 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1432 if (BYTES_BIG_ENDIAN)
1433 offset += difference % UNITS_PER_WORD;
1436 return offset;
1439 /* Return offset in bytes to get OUTERMODE high part
1440 of the value in mode INNERMODE stored in memory in target format. */
1441 unsigned int
1442 subreg_highpart_offset (enum machine_mode outermode, enum machine_mode innermode)
1444 unsigned int offset = 0;
1445 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1447 if (GET_MODE_SIZE (innermode) < GET_MODE_SIZE (outermode))
1448 abort ();
1450 if (difference > 0)
1452 if (! WORDS_BIG_ENDIAN)
1453 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1454 if (! BYTES_BIG_ENDIAN)
1455 offset += difference % UNITS_PER_WORD;
1458 return offset;
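/* Worked example (illustrative): for SImode within DImode, difference is
   8 - 4 = 4, so on a 32-bit little-endian target the lowpart offset is 0
   and the highpart offset is 4; on a 32-bit big-endian target the two
   values are swapped.  */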
1461 /* Return 1 iff X, assumed to be a SUBREG,
1462 refers to the least significant part of its containing reg.
1463 If X is not a SUBREG, always return 1 (it is its own low part!). */
1465 int
1466 subreg_lowpart_p (rtx x)
1468 if (GET_CODE (x) != SUBREG)
1469 return 1;
1470 else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
1471 return 0;
1473 return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
1474 == SUBREG_BYTE (x));
1478 /* Helper routine for all the constant cases of operand_subword.
1479 Some places invoke this directly. */
1481 rtx
1482 constant_subword (rtx op, int offset, enum machine_mode mode)
1484 int size_ratio = HOST_BITS_PER_WIDE_INT / BITS_PER_WORD;
1485 HOST_WIDE_INT val;
1487 /* If OP is already an integer word, return it. */
1488 if (GET_MODE_CLASS (mode) == MODE_INT
1489 && GET_MODE_SIZE (mode) == UNITS_PER_WORD)
1490 return op;
1492 /* The output is some bits, the width of the target machine's word.
1493 A wider-word host can surely hold them in a CONST_INT. A narrower-word
1494 host can't. */
1495 if (HOST_BITS_PER_WIDE_INT >= BITS_PER_WORD
1496 && GET_MODE_CLASS (mode) == MODE_FLOAT
1497 && GET_MODE_BITSIZE (mode) == 64
1498 && GET_CODE (op) == CONST_DOUBLE)
1500 long k[2];
1501 REAL_VALUE_TYPE rv;
1503 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1504 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
1506 /* We handle 32-bit and >= 64-bit words here. Note that the order in
1507 which the words are written depends on the word endianness.
1508 ??? This is a potential portability problem and should
1509 be fixed at some point.
1511 We must exercise caution with the sign bit. By definition there
1512 are 32 significant bits in K; there may be more in a HOST_WIDE_INT.
1513 Consider a host with a 32-bit long and a 64-bit HOST_WIDE_INT.
1514 So we explicitly mask and sign-extend as necessary. */
1515 if (BITS_PER_WORD == 32)
1517 val = k[offset];
1518 val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;
1519 return GEN_INT (val);
1521 #if HOST_BITS_PER_WIDE_INT >= 64
1522 else if (BITS_PER_WORD >= 64 && offset == 0)
1524 val = k[! WORDS_BIG_ENDIAN];
1525 val = (((val & 0xffffffff) ^ 0x80000000) - 0x80000000) << 32;
1526 val |= (HOST_WIDE_INT) k[WORDS_BIG_ENDIAN] & 0xffffffff;
1527 return GEN_INT (val);
1529 #endif
1530 else if (BITS_PER_WORD == 16)
1532 val = k[offset >> 1];
1533 if ((offset & 1) == ! WORDS_BIG_ENDIAN)
1534 val >>= 16;
1535 val = ((val & 0xffff) ^ 0x8000) - 0x8000;
1536 return GEN_INT (val);
1538 else
1539 abort ();
1541 else if (HOST_BITS_PER_WIDE_INT >= BITS_PER_WORD
1542 && GET_MODE_CLASS (mode) == MODE_FLOAT
1543 && GET_MODE_BITSIZE (mode) > 64
1544 && GET_CODE (op) == CONST_DOUBLE)
1546 long k[4];
1547 REAL_VALUE_TYPE rv;
1549 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1550 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
1552 if (BITS_PER_WORD == 32)
1554 val = k[offset];
1555 val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;
1556 return GEN_INT (val);
1558 #if HOST_BITS_PER_WIDE_INT >= 64
1559 else if (BITS_PER_WORD >= 64 && offset <= 1)
1561 val = k[offset * 2 + ! WORDS_BIG_ENDIAN];
1562 val = (((val & 0xffffffff) ^ 0x80000000) - 0x80000000) << 32;
1563 val |= (HOST_WIDE_INT) k[offset * 2 + WORDS_BIG_ENDIAN] & 0xffffffff;
1564 return GEN_INT (val);
1566 #endif
1567 else
1568 abort ();
1571 /* Single word float is a little harder, since single- and double-word
1572 values often do not have the same high-order bits. We have already
1573 verified that we want the only defined word of the single-word value. */
1574 if (GET_MODE_CLASS (mode) == MODE_FLOAT
1575 && GET_MODE_BITSIZE (mode) == 32
1576 && GET_CODE (op) == CONST_DOUBLE)
1578 long l;
1579 REAL_VALUE_TYPE rv;
1581 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1582 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1584 /* Sign extend from known 32-bit value to HOST_WIDE_INT. */
1585 val = l;
1586 val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;
1588 if (BITS_PER_WORD == 16)
1590 if ((offset & 1) == ! WORDS_BIG_ENDIAN)
1591 val >>= 16;
1592 val = ((val & 0xffff) ^ 0x8000) - 0x8000;
1595 return GEN_INT (val);
1598 /* The only remaining cases that we can handle are integers.
1599 Convert to proper endianness now since these cases need it.
1600 At this point, offset == 0 means the low-order word.
1602 We do not want to handle the case when BITS_PER_WORD <= HOST_BITS_PER_INT
1603 in general. However, if OP is (const_int 0), we can just return
1604 it for any word. */
1606 if (op == const0_rtx)
1607 return op;
1609 if (GET_MODE_CLASS (mode) != MODE_INT
1610 || (GET_CODE (op) != CONST_INT && GET_CODE (op) != CONST_DOUBLE)
1611 || BITS_PER_WORD > HOST_BITS_PER_WIDE_INT)
1612 return 0;
1614 if (WORDS_BIG_ENDIAN)
1615 offset = GET_MODE_SIZE (mode) / UNITS_PER_WORD - 1 - offset;
1617 /* Find out which word on the host machine this value is in and get
1618 it from the constant. */
1619 val = (offset / size_ratio == 0
1620 ? (GET_CODE (op) == CONST_INT ? INTVAL (op) : CONST_DOUBLE_LOW (op))
1621 : (GET_CODE (op) == CONST_INT
1622 ? (INTVAL (op) < 0 ? ~0 : 0) : CONST_DOUBLE_HIGH (op)));
1624 /* Get the value we want into the low bits of val. */
1625 if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT)
1626 val = ((val >> ((offset % size_ratio) * BITS_PER_WORD)));
1628 val = trunc_int_for_mode (val, word_mode);
1630 return GEN_INT (val);
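/* Example (illustrative): on a target with 32-bit words, asking for either
   word of the DImode constant (const_int -1) --
   constant_subword (op, 0, DImode) or constant_subword (op, 1, DImode) --
   returns (const_int -1), since word 1 is just the sign-extension of the
   low-order word.  */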
1633 /* Return subword OFFSET of operand OP.
1634 The word number, OFFSET, is interpreted as the word number starting
1635 at the low-order address. OFFSET 0 is the low-order word if not
1636 WORDS_BIG_ENDIAN, otherwise it is the high-order word.
1638 If we cannot extract the required word, we return zero. Otherwise,
1639 an rtx corresponding to the requested word will be returned.
1641 VALIDATE_ADDRESS is nonzero if the address should be validated. Before
1642 reload has completed, a valid address will always be returned. After
1643 reload, if a valid address cannot be returned, we return zero.
1645 If VALIDATE_ADDRESS is zero, we simply form the required address; validating
1646 it is the responsibility of the caller.
1648 MODE is the mode of OP in case it is a CONST_INT.
1650 ??? This is still rather broken for some cases. The problem for the
1651 moment is that all callers of this thing provide no 'goal mode' to
1652 tell us to work with. This exists because all callers were written
1653 in a word based SUBREG world.
1654 Now use of this function can be deprecated by simplify_subreg in most
1655 cases.
1656 */
1658 rtx
1659 operand_subword (rtx op, unsigned int offset, int validate_address, enum machine_mode mode)
1661 if (mode == VOIDmode)
1662 mode = GET_MODE (op);
1664 if (mode == VOIDmode)
1665 abort ();
1667 /* If OP is narrower than a word, fail. */
1668 if (mode != BLKmode
1669 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
1670 return 0;
1672 /* If we want a word outside OP, return zero. */
1673 if (mode != BLKmode
1674 && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
1675 return const0_rtx;
1677 /* Form a new MEM at the requested address. */
1678 if (GET_CODE (op) == MEM)
1680 rtx new = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);
1682 if (! validate_address)
1683 return new;
1685 else if (reload_completed)
1687 if (! strict_memory_address_p (word_mode, XEXP (new, 0)))
1688 return 0;
1690 else
1691 return replace_equiv_address (new, XEXP (new, 0));
1694 /* Rest can be handled by simplify_subreg. */
1695 return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
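/* Example (illustrative): on a target with 32-bit words, the two halves of
   a DImode pseudo are fetched as operand_subword (reg, 0, 1, DImode) and
   operand_subword (reg, 1, 1, DImode); for a pseudo REG these come back
   from simplify_gen_subreg as (subreg:SI (reg:DI ...) 0) and
   (subreg:SI (reg:DI ...) 4) on a little-endian target.  */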
1698 /* Similar to `operand_subword', but never return 0. If we can't extract
1699 the required subword, put OP into a register and try again. If that fails,
1700 abort. We always validate the address in this case.
1702 MODE is the mode of OP, in case it is CONST_INT. */
1704 rtx
1705 operand_subword_force (rtx op, unsigned int offset, enum machine_mode mode)
1707 rtx result = operand_subword (op, offset, 1, mode);
1709 if (result)
1710 return result;
1712 if (mode != BLKmode && mode != VOIDmode)
1714 /* If this is a register that cannot be accessed by words, copy it
1715 to a pseudo register. */
1716 if (GET_CODE (op) == REG)
1717 op = copy_to_reg (op);
1718 else
1719 op = force_reg (mode, op);
1722 result = operand_subword (op, offset, 1, mode);
1723 if (result == 0)
1724 abort ();
1726 return result;
1729 /* Given a compare instruction, swap the operands.
1730 A test instruction is changed into a compare of 0 against the operand. */
1732 void
1733 reverse_comparison (rtx insn)
1735 rtx body = PATTERN (insn);
1736 rtx comp;
1738 if (GET_CODE (body) == SET)
1739 comp = SET_SRC (body);
1740 else
1741 comp = SET_SRC (XVECEXP (body, 0, 0));
1743 if (GET_CODE (comp) == COMPARE)
1745 rtx op0 = XEXP (comp, 0);
1746 rtx op1 = XEXP (comp, 1);
1747 XEXP (comp, 0) = op1;
1748 XEXP (comp, 1) = op0;
1750 else
1752 rtx new = gen_rtx_COMPARE (VOIDmode,
1753 CONST0_RTX (GET_MODE (comp)), comp);
1754 if (GET_CODE (body) == SET)
1755 SET_SRC (body) = new;
1756 else
1757 SET_SRC (XVECEXP (body, 0, 0)) = new;
1761 /* Within a MEM_EXPR, we care about either (1) a component ref of a decl,
1762 or (2) a component ref of something variable. Represent the latter with
1763 a NULL expression. */
1765 static tree
1766 component_ref_for_mem_expr (tree ref)
1768 tree inner = TREE_OPERAND (ref, 0);
1770 if (TREE_CODE (inner) == COMPONENT_REF)
1771 inner = component_ref_for_mem_expr (inner);
1772 else
1774 tree placeholder_ptr = 0;
1776 /* Now remove any conversions: they don't change what the underlying
1777 object is. Likewise for SAVE_EXPR. Also handle PLACEHOLDER_EXPR. */
1778 while (TREE_CODE (inner) == NOP_EXPR || TREE_CODE (inner) == CONVERT_EXPR
1779 || TREE_CODE (inner) == NON_LVALUE_EXPR
1780 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1781 || TREE_CODE (inner) == SAVE_EXPR
1782 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
1783 if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
1784 inner = find_placeholder (inner, &placeholder_ptr);
1785 else
1786 inner = TREE_OPERAND (inner, 0);
1788 if (! DECL_P (inner))
1789 inner = NULL_TREE;
1792 if (inner == TREE_OPERAND (ref, 0))
1793 return ref;
1794 else
1795 return build (COMPONENT_REF, TREE_TYPE (ref), inner,
1796 TREE_OPERAND (ref, 1));
1799 /* Given REF, a MEM, and T, either the type of X or the expression
1800 corresponding to REF, set the memory attributes. OBJECTP is nonzero
1801 if we are making a new object of this type. BITPOS is nonzero if
1802 there is an offset outstanding on T that will be applied later. */
1804 void
1805 set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
1806 HOST_WIDE_INT bitpos)
1808 HOST_WIDE_INT alias = MEM_ALIAS_SET (ref);
1809 tree expr = MEM_EXPR (ref);
1810 rtx offset = MEM_OFFSET (ref);
1811 rtx size = MEM_SIZE (ref);
1812 unsigned int align = MEM_ALIGN (ref);
1813 HOST_WIDE_INT apply_bitpos = 0;
1814 tree type;
1816 /* It can happen that type_for_mode was given a mode for which there
1817 is no language-level type, in which case it returns NULL, which
1818 we can see here. */
1819 if (t == NULL_TREE)
1820 return;
1822 type = TYPE_P (t) ? t : TREE_TYPE (t);
1824 /* If we have already set DECL_RTL = ref, get_alias_set will get the
1825 wrong answer, as it assumes that DECL_RTL already has the right alias
1826 info. Callers should not set DECL_RTL until after the call to
1827 set_mem_attributes. */
1828 if (DECL_P (t) && ref == DECL_RTL_IF_SET (t))
1829 abort ();
1831 /* Get the alias set from the expression or type (perhaps using a
1832 front-end routine) and use it. */
1833 alias = get_alias_set (t);
1835 MEM_VOLATILE_P (ref) = TYPE_VOLATILE (type);
1836 MEM_IN_STRUCT_P (ref) = AGGREGATE_TYPE_P (type);
1837 RTX_UNCHANGING_P (ref)
1838 |= ((lang_hooks.honor_readonly
1839 && (TYPE_READONLY (type) || TREE_READONLY (t)))
1840 || (! TYPE_P (t) && TREE_CONSTANT (t)));
1842 /* If we are making an object of this type, or if this is a DECL, we know
1843 that it is a scalar if the type is not an aggregate. */
1844 if ((objectp || DECL_P (t)) && ! AGGREGATE_TYPE_P (type))
1845 MEM_SCALAR_P (ref) = 1;
1847 /* We can set the alignment from the type if we are making an object,
1848 this is an INDIRECT_REF, or if TYPE_ALIGN_OK. */
1849 if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
1850 align = MAX (align, TYPE_ALIGN (type));
1852 /* If the size is known, we can set that. */
1853 if (TYPE_SIZE_UNIT (type) && host_integerp (TYPE_SIZE_UNIT (type), 1))
1854 size = GEN_INT (tree_low_cst (TYPE_SIZE_UNIT (type), 1));
1856 /* If T is not a type, we may be able to deduce some more information about
1857 the expression. */
1858 if (! TYPE_P (t))
1860 maybe_set_unchanging (ref, t);
1861 if (TREE_THIS_VOLATILE (t))
1862 MEM_VOLATILE_P (ref) = 1;
1864 /* Now remove any conversions: they don't change what the underlying
1865 object is. Likewise for SAVE_EXPR. */
1866 while (TREE_CODE (t) == NOP_EXPR || TREE_CODE (t) == CONVERT_EXPR
1867 || TREE_CODE (t) == NON_LVALUE_EXPR
1868 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1869 || TREE_CODE (t) == SAVE_EXPR)
1870 t = TREE_OPERAND (t, 0);
1872 /* If this expression can't be addressed (e.g., it contains a reference
1873 to a non-addressable field), show we don't change its alias set. */
1874 if (! can_address_p (t))
1875 MEM_KEEP_ALIAS_SET_P (ref) = 1;
1877 /* If this is a decl, set the attributes of the MEM from it. */
1878 if (DECL_P (t))
1880 expr = t;
1881 offset = const0_rtx;
1882 apply_bitpos = bitpos;
1883 size = (DECL_SIZE_UNIT (t)
1884 && host_integerp (DECL_SIZE_UNIT (t), 1)
1885 ? GEN_INT (tree_low_cst (DECL_SIZE_UNIT (t), 1)) : 0);
1886 align = DECL_ALIGN (t);
1889 /* If this is a constant, we know the alignment. */
1890 else if (TREE_CODE_CLASS (TREE_CODE (t)) == 'c')
1892 align = TYPE_ALIGN (type);
1893 #ifdef CONSTANT_ALIGNMENT
1894 align = CONSTANT_ALIGNMENT (t, align);
1895 #endif
1898 /* If this is a field reference and not a bit-field, record it. */
1899 /* ??? There is some information that can be gleaned from bit-fields,
1900 such as the word offset in the structure that might be modified.
1901 But skip it for now. */
1902 else if (TREE_CODE (t) == COMPONENT_REF
1903 && ! DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
1905 expr = component_ref_for_mem_expr (t);
1906 offset = const0_rtx;
1907 apply_bitpos = bitpos;
1908 /* ??? Any reason the field size would be different than
1909 the size we got from the type? */
1912 /* If this is an array reference, look for an outer field reference. */
1913 else if (TREE_CODE (t) == ARRAY_REF)
1915 tree off_tree = size_zero_node;
1916 /* We can't modify t, because we use it at the end of the
1917 function. */
1918 tree t2 = t;
1922 tree index = TREE_OPERAND (t2, 1);
1923 tree array = TREE_OPERAND (t2, 0);
1924 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
1925 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
1926 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
1928 /* We assume all arrays have sizes that are a multiple of a byte.
1929 First subtract the lower bound, if any, in the type of the
1930 index, then convert to sizetype and multiply by the size of the
1931 array element. */
1932 if (low_bound != 0 && ! integer_zerop (low_bound))
1933 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
1934 index, low_bound));
1936 /* If the index has a self-referential type, pass it to a
1937 WITH_RECORD_EXPR; if the component size is self-referential,
1938 pass our component to one. */
1939 if (CONTAINS_PLACEHOLDER_P (index))
1940 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, t2);
1941 if (CONTAINS_PLACEHOLDER_P (unit_size))
1942 unit_size = build (WITH_RECORD_EXPR, sizetype,
1943 unit_size, array);
1945 off_tree
1946 = fold (build (PLUS_EXPR, sizetype,
1947 fold (build (MULT_EXPR, sizetype,
1948 index,
1949 unit_size)),
1950 off_tree));
1951 t2 = TREE_OPERAND (t2, 0);
1953 while (TREE_CODE (t2) == ARRAY_REF);
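/* Worked example (illustrative, made-up values): for a[i][j] with int
   elements of size 4, rows of 10 elements and zero lower bounds, the
   loop above accumulates

       off_tree = j * 4 + i * 40

   i.e. the byte offset of the element from the start of a.  */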
1955 if (DECL_P (t2))
1957 expr = t2;
1958 offset = NULL;
1959 if (host_integerp (off_tree, 1))
1961 HOST_WIDE_INT ioff = tree_low_cst (off_tree, 1);
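/* IOFF & -IOFF isolates the lowest set bit of the byte offset; e.g.
   (illustrative) ioff == 12 (binary 1100) yields aoff ==
   4 * BITS_PER_UNIT, capping the alignment we may claim below.  */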
1962 HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
1963 align = DECL_ALIGN (t2);
1964 if (aoff && (unsigned HOST_WIDE_INT) aoff < align)
1965 align = aoff;
1966 offset = GEN_INT (ioff);
1967 apply_bitpos = bitpos;
1970 else if (TREE_CODE (t2) == COMPONENT_REF)
1972 expr = component_ref_for_mem_expr (t2);
1973 if (host_integerp (off_tree, 1))
1975 offset = GEN_INT (tree_low_cst (off_tree, 1));
1976 apply_bitpos = bitpos;
1978 /* ??? Any reason the field size would be different than
1979 the size we got from the type? */
1981 else if (flag_argument_noalias > 1
1982 && TREE_CODE (t2) == INDIRECT_REF
1983 && TREE_CODE (TREE_OPERAND (t2, 0)) == PARM_DECL)
1985 expr = t2;
1986 offset = NULL;
1990 /* If this is a Fortran indirect argument reference, record the
1991 parameter decl. */
1992 else if (flag_argument_noalias > 1
1993 && TREE_CODE (t) == INDIRECT_REF
1994 && TREE_CODE (TREE_OPERAND (t, 0)) == PARM_DECL)
1996 expr = t;
1997 offset = NULL;
2001 /* If we modified OFFSET based on T, then subtract the outstanding
2002 bit position offset. Similarly, increase the size of the accessed
2003 object to contain the negative offset. */
2004 if (apply_bitpos)
2006 offset = plus_constant (offset, -(apply_bitpos / BITS_PER_UNIT));
2007 if (size)
2008 size = plus_constant (size, apply_bitpos / BITS_PER_UNIT);
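/* Worked example (illustrative): with BITS_PER_UNIT == 8 and
   APPLY_BITPOS == 16, OFFSET shrinks by 2 bytes and SIZE grows by
   2 bytes, so the recorded extent still covers every byte the MEM
   can touch.  */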
2011 /* Now set the attributes we computed above. */
2012 MEM_ATTRS (ref)
2013 = get_mem_attrs (alias, expr, offset, size, align, GET_MODE (ref));
2015 /* If this is already known to be a scalar or aggregate, we are done. */
2016 if (MEM_IN_STRUCT_P (ref) || MEM_SCALAR_P (ref))
2017 return;
2019 /* If it is a reference into an aggregate, this is part of an aggregate.
2020 Otherwise we don't know. */
2021 else if (TREE_CODE (t) == COMPONENT_REF || TREE_CODE (t) == ARRAY_REF
2022 || TREE_CODE (t) == ARRAY_RANGE_REF
2023 || TREE_CODE (t) == BIT_FIELD_REF)
2024 MEM_IN_STRUCT_P (ref) = 1;
2027 void
2028 set_mem_attributes (rtx ref, tree t, int objectp)
2030 set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
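/* Illustrative use (a sketch, not a call site from this file):

       rtx mem = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)), addr);
       set_mem_attributes (mem, exp, 1);

   where EXP is the tree for the object being accessed; afterwards
   MEM_EXPR, MEM_ALIGN, MEM_SIZE and MEM_ALIAS_SET of MEM describe
   EXP.  */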
2033 /* Set the memory attributes of MEM from REG. */
2035 void
2036 set_mem_attrs_from_reg (rtx mem, rtx reg)
2038 MEM_ATTRS (mem)
2039 = get_mem_attrs (MEM_ALIAS_SET (mem), REG_EXPR (reg),
2040 GEN_INT (REG_OFFSET (reg)),
2041 MEM_SIZE (mem), MEM_ALIGN (mem), GET_MODE (mem));
2044 /* Set the alias set of MEM to SET. */
2046 void
2047 set_mem_alias_set (rtx mem, HOST_WIDE_INT set)
2049 #ifdef ENABLE_CHECKING
2050 /* If the new and old alias sets don't conflict, something is wrong. */
2051 if (!alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)))
2052 abort ();
2053 #endif
2055 MEM_ATTRS (mem) = get_mem_attrs (set, MEM_EXPR (mem), MEM_OFFSET (mem),
2056 MEM_SIZE (mem), MEM_ALIGN (mem),
2057 GET_MODE (mem));
2060 /* Set the alignment of MEM to ALIGN bits. */
2062 void
2063 set_mem_align (rtx mem, unsigned int align)
2065 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
2066 MEM_OFFSET (mem), MEM_SIZE (mem), align,
2067 GET_MODE (mem));
2070 /* Set the expr for MEM to EXPR. */
2072 void
2073 set_mem_expr (rtx mem, tree expr)
2075 MEM_ATTRS (mem)
2076 = get_mem_attrs (MEM_ALIAS_SET (mem), expr, MEM_OFFSET (mem),
2077 MEM_SIZE (mem), MEM_ALIGN (mem), GET_MODE (mem));
2080 /* Set the offset of MEM to OFFSET. */
2082 void
2083 set_mem_offset (rtx mem, rtx offset)
2085 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
2086 offset, MEM_SIZE (mem), MEM_ALIGN (mem),
2087 GET_MODE (mem));
2090 /* Set the size of MEM to SIZE. */
2092 void
2093 set_mem_size (rtx mem, rtx size)
2095 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
2096 MEM_OFFSET (mem), size, MEM_ALIGN (mem),
2097 GET_MODE (mem));
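/* Each setter above rebuilds MEM_ATTRS with a single field changed.
   Illustrative sketch (the values are made up for the example):

       set_mem_align (mem, 64);            now known 64-bit aligned
       set_mem_size (mem, GEN_INT (8));    access is 8 bytes wide

   The mem_attrs structure itself is shared (hashed), so the setters
   always build a fresh descriptor rather than mutating the old one
   in place.  */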
2100 /* Return a memory reference like MEMREF, but with its mode changed to MODE
2101 and its address changed to ADDR. (VOIDmode means don't change the mode.
2102 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
2103 returned memory location is required to be valid. The memory
2104 attributes are not changed. */
2106 static rtx
2107 change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate)
2109 rtx new;
2111 if (GET_CODE (memref) != MEM)
2112 abort ();
2113 if (mode == VOIDmode)
2114 mode = GET_MODE (memref);
2115 if (addr == 0)
2116 addr = XEXP (memref, 0);
2118 if (validate)
2120 if (reload_in_progress || reload_completed)
2122 if (! memory_address_p (mode, addr))
2123 abort ();
2125 else
2126 addr = memory_address (mode, addr);
2129 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
2130 return memref;
2132 new = gen_rtx_MEM (mode, addr);
2133 MEM_COPY_ATTRIBUTES (new, memref);
2134 return new;
2137 /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
2138 way we are changing MEMREF, so we only preserve the alias set. */
2141 change_address (rtx memref, enum machine_mode mode, rtx addr)
2143 rtx new = change_address_1 (memref, mode, addr, 1);
2144 enum machine_mode mmode = GET_MODE (new);
2146 MEM_ATTRS (new)
2147 = get_mem_attrs (MEM_ALIAS_SET (memref), 0, 0,
2148 mmode == BLKmode ? 0 : GEN_INT (GET_MODE_SIZE (mmode)),
2149 (mmode == BLKmode ? BITS_PER_UNIT
2150 : GET_MODE_ALIGNMENT (mmode)),
2151 mmode);
2153 return new;
2156 /* Return a memory reference like MEMREF, but with its mode changed
2157 to MODE and its address offset by OFFSET bytes. If VALIDATE is
2158 nonzero, the memory address is forced to be valid.
2159 If ADJUST is zero, OFFSET is only used to update MEM_ATTRS
2160 and the caller is responsible for adjusting the MEMREF base register. */
2163 adjust_address_1 (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset,
2164 int validate, int adjust)
2166 rtx addr = XEXP (memref, 0);
2167 rtx new;
2168 rtx memoffset = MEM_OFFSET (memref);
2169 rtx size = 0;
2170 unsigned int memalign = MEM_ALIGN (memref);
2172 /* ??? Prefer to create garbage instead of creating shared rtl.
2173 This may happen even if offset is nonzero -- consider
2174 (plus (plus reg reg) const_int) -- so do this always. */
2175 addr = copy_rtx (addr);
2177 if (adjust)
2179 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
2180 object, we can merge it into the LO_SUM. */
2181 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
2182 && offset >= 0
2183 && (unsigned HOST_WIDE_INT) offset
2184 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
2185 addr = gen_rtx_LO_SUM (Pmode, XEXP (addr, 0),
2186 plus_constant (XEXP (addr, 1), offset));
2187 else
2188 addr = plus_constant (addr, offset);
2191 new = change_address_1 (memref, mode, addr, validate);
2193 /* Compute the new values of the memory attributes due to this adjustment.
2194 We add the offsets and update the alignment. */
2195 if (memoffset)
2196 memoffset = GEN_INT (offset + INTVAL (memoffset));
2198 /* Compute the new alignment by taking the MIN of the alignment and the
2199 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
2200 is zero. */
2201 if (offset != 0)
2202 memalign
2203 = MIN (memalign,
2204 (unsigned HOST_WIDE_INT) (offset & -offset) * BITS_PER_UNIT);
2206 /* We can compute the size in a number of ways. */
2207 if (GET_MODE (new) != BLKmode)
2208 size = GEN_INT (GET_MODE_SIZE (GET_MODE (new)));
2209 else if (MEM_SIZE (memref))
2210 size = plus_constant (MEM_SIZE (memref), -offset);
2212 MEM_ATTRS (new) = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref),
2213 memoffset, size, memalign, GET_MODE (new));
2215 /* At some point, we should validate that this offset is within the object,
2216 if all the appropriate values are known. */
2217 return new;
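/* Most callers reach this through the adjust_address macro, which
   passes VALIDATE and ADJUST as 1.  Illustrative sketch, splitting a
   DImode MEM into two SImode halves:

       rtx lo = adjust_address (mem, SImode, 0);
       rtx hi = adjust_address (mem, SImode, 4);

   Each half gets MEM_OFFSET, MEM_SIZE and MEM_ALIGN recomputed as
   above for its displacement.  */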
2220 /* Return a memory reference like MEMREF, but with its mode changed
2221 to MODE and its address changed to ADDR, which is assumed to be
2222 MEMREF offset by OFFSET bytes. If VALIDATE is
2223 nonzero, the memory address is forced to be valid. */
2226 adjust_automodify_address_1 (rtx memref, enum machine_mode mode, rtx addr,
2227 HOST_WIDE_INT offset, int validate)
2229 memref = change_address_1 (memref, VOIDmode, addr, validate);
2230 return adjust_address_1 (memref, mode, offset, validate, 0);
2233 /* Return a memory reference like MEMREF, but whose address is changed by
2234 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2235 known to be in OFFSET (possibly 1). */
2238 offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
2240 rtx new, addr = XEXP (memref, 0);
2242 new = simplify_gen_binary (PLUS, Pmode, addr, offset);
2244 /* At this point we don't know _why_ the address is invalid. It
2245 could have secondary memory references, multiplies or anything.
2247 However, if we did go and rearrange things, we can wind up not
2248 being able to recognize the magic around pic_offset_table_rtx.
2249 This stuff is fragile, and is yet another example of why it is
2250 bad to expose PIC machinery too early. */
2251 if (! memory_address_p (GET_MODE (memref), new)
2252 && GET_CODE (addr) == PLUS
2253 && XEXP (addr, 0) == pic_offset_table_rtx)
2255 addr = force_reg (GET_MODE (addr), addr);
2256 new = simplify_gen_binary (PLUS, Pmode, addr, offset);
2259 update_temp_slot_address (XEXP (memref, 0), new);
2260 new = change_address_1 (memref, VOIDmode, new, 1);
2262 /* Update the alignment to reflect the offset. Reset the offset, which
2263 we don't know. */
2264 MEM_ATTRS (new)
2265 = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref), 0, 0,
2266 MIN (MEM_ALIGN (memref), pow2 * BITS_PER_UNIT),
2267 GET_MODE (new));
2268 return new;
2271 /* Return a memory reference like MEMREF, but with its address changed to
2272 ADDR. The caller is asserting that the actual piece of memory pointed
2273 to is the same, just the form of the address is being changed, such as
2274 by putting something into a register. */
2277 replace_equiv_address (rtx memref, rtx addr)
2279 /* change_address_1 copies the memory attribute structure without change
2280 and that's exactly what we want here. */
2281 update_temp_slot_address (XEXP (memref, 0), addr);
2282 return change_address_1 (memref, VOIDmode, addr, 1);
2285 /* Likewise, but the reference is not required to be valid. */
2288 replace_equiv_address_nv (rtx memref, rtx addr)
2290 return change_address_1 (memref, VOIDmode, addr, 0);
2293 /* Return a memory reference like MEMREF, but with its mode widened to
2294 MODE and offset by OFFSET. This would be used by targets that e.g.
2295 cannot issue QImode memory operations and have to use SImode memory
2296 operations plus masking logic. */
2299 widen_memory_access (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset)
2301 rtx new = adjust_address_1 (memref, mode, offset, 1, 1);
2302 tree expr = MEM_EXPR (new);
2303 rtx memoffset = MEM_OFFSET (new);
2304 unsigned int size = GET_MODE_SIZE (mode);
2306 /* If we don't know what offset we were at within the expression, then
2307 we can't know if we've overstepped the bounds. */
2308 if (! memoffset)
2309 expr = NULL_TREE;
2311 while (expr)
2313 if (TREE_CODE (expr) == COMPONENT_REF)
2315 tree field = TREE_OPERAND (expr, 1);
2317 if (! DECL_SIZE_UNIT (field))
2319 expr = NULL_TREE;
2320 break;
2323 /* Is the field at least as large as the access? If so, ok,
2324 otherwise strip back to the containing structure. */
2325 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2326 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
2327 && INTVAL (memoffset) >= 0)
2328 break;
2330 if (! host_integerp (DECL_FIELD_OFFSET (field), 1))
2332 expr = NULL_TREE;
2333 break;
2336 expr = TREE_OPERAND (expr, 0);
2337 memoffset = (GEN_INT (INTVAL (memoffset)
2338 + tree_low_cst (DECL_FIELD_OFFSET (field), 1)
2339 + (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
2340 / BITS_PER_UNIT)));
2342 /* Similarly for the decl. */
2343 else if (DECL_P (expr)
2344 && DECL_SIZE_UNIT (expr)
2345 && TREE_CODE (DECL_SIZE_UNIT (expr)) == INTEGER_CST
2346 && compare_tree_int (DECL_SIZE_UNIT (expr), size) >= 0
2347 && (! memoffset || INTVAL (memoffset) >= 0))
2348 break;
2349 else
2351 /* The widened memory access overflows the expression, which means
2352 that it could alias another expression. Zap it. */
2353 expr = NULL_TREE;
2354 break;
2358 if (! expr)
2359 memoffset = NULL_RTX;
2361 /* The widened memory may alias other stuff, so zap the alias set. */
2362 /* ??? Maybe use get_alias_set on any remaining expression. */
2364 MEM_ATTRS (new) = get_mem_attrs (0, expr, memoffset, GEN_INT (size),
2365 MEM_ALIGN (new), mode);
2367 return new;
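/* Illustrative sketch: a target without byte loads could widen a
   QImode reference to SImode and mask the result (the masking itself
   is the caller's job):

       rtx wide = widen_memory_access (byte_mem, SImode, 0);

   The alias set is zapped above precisely because WIDE may now
   overlap neighboring objects.  */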
2370 /* Return a newly created CODE_LABEL rtx with a unique label number. */
2373 gen_label_rtx (void)
2375 return gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX, NULL_RTX,
2376 NULL, label_num++, NULL);
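/* Typical use (illustrative): create the label up front, branch to
   it, then emit it where control should resume:

       rtx label = gen_label_rtx ();
       emit_jump (label);
       ...
       emit_label (label);
*/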
2379 /* For procedure integration. */
2381 /* Install new pointers to the first and last insns in the chain.
2382 Also, set cur_insn_uid to one higher than the last in use.
2383 Used for an inline-procedure after copying the insn chain. */
2385 void
2386 set_new_first_and_last_insn (rtx first, rtx last)
2388 rtx insn;
2390 first_insn = first;
2391 last_insn = last;
2392 cur_insn_uid = 0;
2394 for (insn = first; insn; insn = NEXT_INSN (insn))
2395 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2397 cur_insn_uid++;
2400 /* Set the range of label numbers found in the current function.
2401 This is used when belatedly compiling an inline function. */
2403 void
2404 set_new_first_and_last_label_num (int first, int last)
2406 base_label_num = label_num;
2407 first_label_num = first;
2408 last_label_num = last;
2411 /* Set the last label number found in the current function.
2412 This is used when belatedly compiling an inline function. */
2414 void
2415 set_new_last_label_num (int last)
2417 base_label_num = label_num;
2418 last_label_num = last;
2421 /* Restore all variables describing the current status from the structure *P.
2422 This is used after a nested function. */
2424 void
2425 restore_emit_status (struct function *p ATTRIBUTE_UNUSED)
2427 last_label_num = 0;
2430 /* Go through all the RTL insn bodies and copy any invalid shared
2431 structure. This routine should only be called once. */
2433 void
2434 unshare_all_rtl (tree fndecl, rtx insn)
2436 tree decl;
2438 /* Make sure that virtual parameters are not shared. */
2439 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
2440 SET_DECL_RTL (decl, copy_rtx_if_shared (DECL_RTL (decl)));
2442 /* Make sure that virtual stack slots are not shared. */
2443 unshare_all_decls (DECL_INITIAL (fndecl));
2445 /* Unshare just about everything else. */
2446 unshare_all_rtl_1 (insn);
2448 /* Make sure the addresses of stack slots found outside the insn chain
2449 (such as, in DECL_RTL of a variable) are not shared
2450 with the insn chain.
2452 This special care is necessary when the stack slot MEM does not
2453 actually appear in the insn chain. If it does appear, its address
2454 is unshared from all else at that point. */
2455 stack_slot_list = copy_rtx_if_shared (stack_slot_list);
2458 /* Go through all the RTL insn bodies and copy any invalid shared
2459 structure, again. This is a fairly expensive thing to do so it
2460 should be done sparingly. */
2462 void
2463 unshare_all_rtl_again (rtx insn)
2465 rtx p;
2466 tree decl;
2468 for (p = insn; p; p = NEXT_INSN (p))
2469 if (INSN_P (p))
2471 reset_used_flags (PATTERN (p));
2472 reset_used_flags (REG_NOTES (p));
2473 reset_used_flags (LOG_LINKS (p));
2476 /* Make sure that virtual stack slots are not shared. */
2477 reset_used_decls (DECL_INITIAL (cfun->decl));
2479 /* Make sure that virtual parameters are not shared. */
2480 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = TREE_CHAIN (decl))
2481 reset_used_flags (DECL_RTL (decl));
2483 reset_used_flags (stack_slot_list);
2485 unshare_all_rtl (cfun->decl, insn);
2488 /* Go through all the RTL insn bodies and copy any invalid shared structure.
2489 Assumes the mark bits are cleared at entry. */
2491 static void
2492 unshare_all_rtl_1 (rtx insn)
2494 for (; insn; insn = NEXT_INSN (insn))
2495 if (INSN_P (insn))
2497 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2498 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2499 LOG_LINKS (insn) = copy_rtx_if_shared (LOG_LINKS (insn));
2503 /* Go through all virtual stack slots of a function and copy any
2504 shared structure. */
2505 static void
2506 unshare_all_decls (tree blk)
2508 tree t;
2510 /* Copy shared decls. */
2511 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
2512 if (DECL_RTL_SET_P (t))
2513 SET_DECL_RTL (t, copy_rtx_if_shared (DECL_RTL (t)));
2515 /* Now process sub-blocks. */
2516 for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
2517 unshare_all_decls (t);
2520 /* Go through all virtual stack slots of a function and mark them as
2521 not shared. */
2522 static void
2523 reset_used_decls (tree blk)
2525 tree t;
2527 /* Mark decls. */
2528 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
2529 if (DECL_RTL_SET_P (t))
2530 reset_used_flags (DECL_RTL (t));
2532 /* Now process sub-blocks. */
2533 for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
2534 reset_used_decls (t);
2537 /* Similar to `copy_rtx' except that if MAY_SHARE is present, it is
2538 placed in the result directly, rather than being copied. MAY_SHARE is
2539 either a MEM or an EXPR_LIST of MEMs. */
2542 copy_most_rtx (rtx orig, rtx may_share)
2544 rtx copy;
2545 int i, j;
2546 RTX_CODE code;
2547 const char *format_ptr;
2549 if (orig == may_share
2550 || (GET_CODE (may_share) == EXPR_LIST
2551 && in_expr_list_p (may_share, orig)))
2552 return orig;
2554 code = GET_CODE (orig);
2556 switch (code)
2558 case REG:
2559 case QUEUED:
2560 case CONST_INT:
2561 case CONST_DOUBLE:
2562 case CONST_VECTOR:
2563 case SYMBOL_REF:
2564 case CODE_LABEL:
2565 case PC:
2566 case CC0:
2567 return orig;
2568 default:
2569 break;
2572 copy = rtx_alloc (code);
2573 PUT_MODE (copy, GET_MODE (orig));
2574 RTX_FLAG (copy, in_struct) = RTX_FLAG (orig, in_struct);
2575 RTX_FLAG (copy, volatil) = RTX_FLAG (orig, volatil);
2576 RTX_FLAG (copy, unchanging) = RTX_FLAG (orig, unchanging);
2577 RTX_FLAG (copy, integrated) = RTX_FLAG (orig, integrated);
2578 RTX_FLAG (copy, frame_related) = RTX_FLAG (orig, frame_related);
2580 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
2582 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
2584 switch (*format_ptr++)
2586 case 'e':
2587 XEXP (copy, i) = XEXP (orig, i);
2588 if (XEXP (orig, i) != NULL && XEXP (orig, i) != may_share)
2589 XEXP (copy, i) = copy_most_rtx (XEXP (orig, i), may_share);
2590 break;
2592 case 'u':
2593 XEXP (copy, i) = XEXP (orig, i);
2594 break;
2596 case 'E':
2597 case 'V':
2598 XVEC (copy, i) = XVEC (orig, i);
2599 if (XVEC (orig, i) != NULL)
2601 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
2602 for (j = 0; j < XVECLEN (copy, i); j++)
2603 XVECEXP (copy, i, j)
2604 = copy_most_rtx (XVECEXP (orig, i, j), may_share);
2606 break;
2608 case 'w':
2609 XWINT (copy, i) = XWINT (orig, i);
2610 break;
2612 case 'n':
2613 case 'i':
2614 XINT (copy, i) = XINT (orig, i);
2615 break;
2617 case 't':
2618 XTREE (copy, i) = XTREE (orig, i);
2619 break;
2621 case 's':
2622 case 'S':
2623 XSTR (copy, i) = XSTR (orig, i);
2624 break;
2626 case '0':
2627 /* Copy this through the wide int field; that's safest. */
2628 X0WINT (copy, i) = X0WINT (orig, i);
2629 break;
2631 default:
2632 abort ();
2635 return copy;
2638 /* Mark ORIG as in use, and return a copy of it if it was already in use.
2639 Recursively does the same for subexpressions. */
2642 copy_rtx_if_shared (rtx orig)
2644 rtx x = orig;
2645 int i;
2646 enum rtx_code code;
2647 const char *format_ptr;
2648 int copied = 0;
2650 if (x == 0)
2651 return 0;
2653 code = GET_CODE (x);
2655 /* These types may be freely shared. */
2657 switch (code)
2659 case REG:
2660 case QUEUED:
2661 case CONST_INT:
2662 case CONST_DOUBLE:
2663 case CONST_VECTOR:
2664 case SYMBOL_REF:
2665 case CODE_LABEL:
2666 case PC:
2667 case CC0:
2668 case SCRATCH:
2669 /* SCRATCH must be shared because each one represents a distinct value. */
2670 return x;
2672 case CONST:
2673 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
2674 a LABEL_REF, it isn't sharable. */
2675 if (GET_CODE (XEXP (x, 0)) == PLUS
2676 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
2677 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
2678 return x;
2679 break;
2681 case INSN:
2682 case JUMP_INSN:
2683 case CALL_INSN:
2684 case NOTE:
2685 case BARRIER:
2686 /* The chain of insns is not being copied. */
2687 return x;
2689 case MEM:
2690 /* A MEM is allowed to be shared if its address is constant.
2692 We used to allow sharing of MEMs which referenced
2693 virtual_stack_vars_rtx or virtual_incoming_args_rtx, but
2694 that can lose. instantiate_virtual_regs will not unshare
2695 the MEMs, and combine may change the structure of the address
2696 because it looks safe and profitable in one context, but
2697 in some other context it creates unrecognizable RTL. */
2698 if (CONSTANT_ADDRESS_P (XEXP (x, 0)))
2699 return x;
2701 break;
2703 default:
2704 break;
2707 /* This rtx may not be shared. If it has already been seen,
2708 replace it with a copy of itself. */
2710 if (RTX_FLAG (x, used))
2712 rtx copy;
2714 copy = rtx_alloc (code);
2715 memcpy (copy, x,
2716 (sizeof (*copy) - sizeof (copy->fld)
2717 + sizeof (copy->fld[0]) * GET_RTX_LENGTH (code)));
2718 x = copy;
2719 copied = 1;
2721 RTX_FLAG (x, used) = 1;
2723 /* Now scan the subexpressions recursively.
2724 We can store any replaced subexpressions directly into X
2725 since we know X is not shared! Any vectors in X
2726 must be copied if X was copied. */
2728 format_ptr = GET_RTX_FORMAT (code);
2730 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2732 switch (*format_ptr++)
2734 case 'e':
2735 XEXP (x, i) = copy_rtx_if_shared (XEXP (x, i));
2736 break;
2738 case 'E':
2739 if (XVEC (x, i) != NULL)
2741 int j;
2742 int len = XVECLEN (x, i);
2744 if (copied && len > 0)
2745 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
2746 for (j = 0; j < len; j++)
2747 XVECEXP (x, i, j) = copy_rtx_if_shared (XVECEXP (x, i, j));
2749 break;
2752 return x;
2755 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
2756 to look for shared sub-parts. */
2758 void
2759 reset_used_flags (rtx x)
2761 int i, j;
2762 enum rtx_code code;
2763 const char *format_ptr;
2765 if (x == 0)
2766 return;
2768 code = GET_CODE (x);
2770 /* These types may be freely shared so we needn't do any resetting
2771 for them. */
2773 switch (code)
2775 case REG:
2776 case QUEUED:
2777 case CONST_INT:
2778 case CONST_DOUBLE:
2779 case CONST_VECTOR:
2780 case SYMBOL_REF:
2781 case CODE_LABEL:
2782 case PC:
2783 case CC0:
2784 return;
2786 case INSN:
2787 case JUMP_INSN:
2788 case CALL_INSN:
2789 case NOTE:
2790 case LABEL_REF:
2791 case BARRIER:
2792 /* The chain of insns is not being copied. */
2793 return;
2795 default:
2796 break;
2799 RTX_FLAG (x, used) = 0;
2801 format_ptr = GET_RTX_FORMAT (code);
2802 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2804 switch (*format_ptr++)
2806 case 'e':
2807 reset_used_flags (XEXP (x, i));
2808 break;
2810 case 'E':
2811 for (j = 0; j < XVECLEN (x, i); j++)
2812 reset_used_flags (XVECEXP (x, i, j));
2813 break;
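/* The mark-and-copy protocol (see unshare_all_rtl_again above):

       reset_used_flags (PATTERN (insn));
       PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));

   The first walk clears the `used' bits; the second sets them and
   copies any rtx it reaches whose bit is already set.  */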
2818 /* Copy X if necessary so that it won't be altered by changes in OTHER.
2819 Return X or the rtx for the pseudo reg the value of X was copied into.
2820 OTHER must be valid as a SET_DEST. */
2823 make_safe_from (rtx x, rtx other)
2825 while (1)
2826 switch (GET_CODE (other))
2828 case SUBREG:
2829 other = SUBREG_REG (other);
2830 break;
2831 case STRICT_LOW_PART:
2832 case SIGN_EXTEND:
2833 case ZERO_EXTEND:
2834 other = XEXP (other, 0);
2835 break;
2836 default:
2837 goto done;
2839 done:
2840 if ((GET_CODE (other) == MEM
2841 && ! CONSTANT_P (x)
2842 && GET_CODE (x) != REG
2843 && GET_CODE (x) != SUBREG)
2844 || (GET_CODE (other) == REG
2845 && (REGNO (other) < FIRST_PSEUDO_REGISTER
2846 || reg_mentioned_p (other, x))))
2848 rtx temp = gen_reg_rtx (GET_MODE (x));
2849 emit_move_insn (temp, x);
2850 return temp;
2852 return x;
2855 /* Emission of insns (adding them to the doubly-linked list). */
2857 /* Return the first insn of the current sequence or current function. */
2860 get_insns (void)
2862 return first_insn;
2865 /* Specify a new insn as the first in the chain. */
2867 void
2868 set_first_insn (rtx insn)
2870 if (PREV_INSN (insn) != 0)
2871 abort ();
2872 first_insn = insn;
2875 /* Return the last insn emitted in current sequence or current function. */
2878 get_last_insn (void)
2880 return last_insn;
2883 /* Specify a new insn as the last in the chain. */
2885 void
2886 set_last_insn (rtx insn)
2888 if (NEXT_INSN (insn) != 0)
2889 abort ();
2890 last_insn = insn;
2893 /* Return the last insn emitted, even if it is in a sequence now pushed. */
2896 get_last_insn_anywhere (void)
2898 struct sequence_stack *stack;
2899 if (last_insn)
2900 return last_insn;
2901 for (stack = seq_stack; stack; stack = stack->next)
2902 if (stack->last != 0)
2903 return stack->last;
2904 return 0;
2907 /* Return the first nonnote insn emitted in current sequence or current
2908 function. This routine looks inside SEQUENCEs. */
2911 get_first_nonnote_insn (void)
2913 rtx insn = first_insn;
2915 while (insn)
2917 insn = next_insn (insn);
2918 if (insn == 0 || GET_CODE (insn) != NOTE)
2919 break;
2922 return insn;
2925 /* Return the last nonnote insn emitted in current sequence or current
2926 function. This routine looks inside SEQUENCEs. */
2929 get_last_nonnote_insn (void)
2931 rtx insn = last_insn;
2933 while (insn)
2935 insn = previous_insn (insn);
2936 if (insn == 0 || GET_CODE (insn) != NOTE)
2937 break;
2940 return insn;
2943 /* Return a number larger than any instruction's uid in this function. */
2946 get_max_uid (void)
2948 return cur_insn_uid;
2951 /* Renumber instructions so that no instruction UIDs are wasted. */
2953 void
2954 renumber_insns (FILE *stream)
2956 rtx insn;
2958 /* If we're not supposed to renumber instructions, don't. */
2959 if (!flag_renumber_insns)
2960 return;
2962 /* If there aren't that many instructions, then it's not really
2963 worth renumbering them. */
2964 if (flag_renumber_insns == 1 && get_max_uid () < 25000)
2965 return;
2967 cur_insn_uid = 1;
2969 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2971 if (stream)
2972 fprintf (stream, "Renumbering insn %d to %d\n",
2973 INSN_UID (insn), cur_insn_uid);
2974 INSN_UID (insn) = cur_insn_uid++;
2978 /* Return the next insn. If it is a SEQUENCE, return the first insn
2979 of the sequence. */
2982 next_insn (rtx insn)
2984 if (insn)
2986 insn = NEXT_INSN (insn);
2987 if (insn && GET_CODE (insn) == INSN
2988 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2989 insn = XVECEXP (PATTERN (insn), 0, 0);
2992 return insn;
2995 /* Return the previous insn. If it is a SEQUENCE, return the last insn
2996 of the sequence. */
2999 previous_insn (rtx insn)
3001 if (insn)
3003 insn = PREV_INSN (insn);
3004 if (insn && GET_CODE (insn) == INSN
3005 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3006 insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
3009 return insn;
3012 /* Return the next insn after INSN that is not a NOTE. This routine does not
3013 look inside SEQUENCEs. */
3016 next_nonnote_insn (rtx insn)
3018 while (insn)
3020 insn = NEXT_INSN (insn);
3021 if (insn == 0 || GET_CODE (insn) != NOTE)
3022 break;
3025 return insn;
3028 /* Return the previous insn before INSN that is not a NOTE. This routine does
3029 not look inside SEQUENCEs. */
3032 prev_nonnote_insn (rtx insn)
3034 while (insn)
3036 insn = PREV_INSN (insn);
3037 if (insn == 0 || GET_CODE (insn) != NOTE)
3038 break;
3041 return insn;
3044 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3045 or 0, if there is none. This routine does not look inside
3046 SEQUENCEs. */
3049 next_real_insn (rtx insn)
3051 while (insn)
3053 insn = NEXT_INSN (insn);
3054 if (insn == 0 || GET_CODE (insn) == INSN
3055 || GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == JUMP_INSN)
3056 break;
3059 return insn;
3062 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3063 or 0, if there is none. This routine does not look inside
3064 SEQUENCEs. */
3067 prev_real_insn (rtx insn)
3069 while (insn)
3071 insn = PREV_INSN (insn);
3072 if (insn == 0 || GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
3073 || GET_CODE (insn) == JUMP_INSN)
3074 break;
3077 return insn;
3080 /* Return the last CALL_INSN in the current list, or 0 if there is none.
3081 This routine does not look inside SEQUENCEs. */
3084 last_call_insn (void)
3086 rtx insn;
3088 for (insn = get_last_insn ();
3089 insn && GET_CODE (insn) != CALL_INSN;
3090 insn = PREV_INSN (insn))
3093 return insn;
3096 /* Find the next insn after INSN that really does something. This routine
3097 does not look inside SEQUENCEs. Until reload has completed, this is the
3098 same as next_real_insn. */
3101 active_insn_p (rtx insn)
3103 return (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == JUMP_INSN
3104 || (GET_CODE (insn) == INSN
3105 && (! reload_completed
3106 || (GET_CODE (PATTERN (insn)) != USE
3107 && GET_CODE (PATTERN (insn)) != CLOBBER))));
3111 next_active_insn (rtx insn)
3113 while (insn)
3115 insn = NEXT_INSN (insn);
3116 if (insn == 0 || active_insn_p (insn))
3117 break;
3120 return insn;
3123 /* Find the last insn before INSN that really does something. This routine
3124 does not look inside SEQUENCEs. Until reload has completed, this is the
3125 same as prev_real_insn. */
3128 prev_active_insn (rtx insn)
3130 while (insn)
3132 insn = PREV_INSN (insn);
3133 if (insn == 0 || active_insn_p (insn))
3134 break;
3137 return insn;
3140 /* Return the next CODE_LABEL after the insn INSN, or 0 if there is none. */
3143 next_label (rtx insn)
3145 while (insn)
3147 insn = NEXT_INSN (insn);
3148 if (insn == 0 || GET_CODE (insn) == CODE_LABEL)
3149 break;
3152 return insn;
3155 /* Return the last CODE_LABEL before the insn INSN, or 0 if there is none. */
3158 prev_label (rtx insn)
3160 while (insn)
3162 insn = PREV_INSN (insn);
3163 if (insn == 0 || GET_CODE (insn) == CODE_LABEL)
3164 break;
3167 return insn;
3170 #ifdef HAVE_cc0
3171 /* INSN uses CC0 and is being moved into a delay slot. Set up REG_CC_SETTER
3172 and REG_CC_USER notes so we can find it. */
3174 void
3175 link_cc0_insns (rtx insn)
3177 rtx user = next_nonnote_insn (insn);
3179 if (GET_CODE (user) == INSN && GET_CODE (PATTERN (user)) == SEQUENCE)
3180 user = XVECEXP (PATTERN (user), 0, 0);
3182 REG_NOTES (user) = gen_rtx_INSN_LIST (REG_CC_SETTER, insn,
3183 REG_NOTES (user));
3184 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_CC_USER, user, REG_NOTES (insn));
3187 /* Return the next insn that uses CC0 after INSN, which is assumed to
3188 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3189 applied to the result of this function should yield INSN).
3191 Normally, this is simply the next insn. However, if a REG_CC_USER note
3192 is present, it contains the insn that uses CC0.
3194 Return 0 if we can't find the insn. */
3197 next_cc0_user (rtx insn)
3199 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3201 if (note)
3202 return XEXP (note, 0);
3204 insn = next_nonnote_insn (insn);
3205 if (insn && GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
3206 insn = XVECEXP (PATTERN (insn), 0, 0);
3208 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3209 return insn;
3211 return 0;
3214 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3215 note, it is the previous insn. */
3218 prev_cc0_setter (rtx insn)
3220 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3222 if (note)
3223 return XEXP (note, 0);
3225 insn = prev_nonnote_insn (insn);
3226 if (! sets_cc0_p (PATTERN (insn)))
3227 abort ();
3229 return insn;
3231 #endif
3233 /* Increment the label uses for all labels present in rtx. */
3235 static void
3236 mark_label_nuses (rtx x)
3238 enum rtx_code code;
3239 int i, j;
3240 const char *fmt;
3242 code = GET_CODE (x);
3243 if (code == LABEL_REF)
3244 LABEL_NUSES (XEXP (x, 0))++;
3246 fmt = GET_RTX_FORMAT (code);
3247 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3249 if (fmt[i] == 'e')
3250 mark_label_nuses (XEXP (x, i));
3251 else if (fmt[i] == 'E')
3252 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3253 mark_label_nuses (XVECEXP (x, i, j));
3258 /* Try splitting insns that can be split for better scheduling.
3259 PAT is the pattern which might split.
3260 TRIAL is the insn providing PAT.
3261 LAST is nonzero if we should return the last insn of the sequence produced.
3263 If this routine succeeds in splitting, it returns the first or last
3264 replacement insn depending on the value of LAST. Otherwise, it
3265 returns TRIAL. If the insn to be returned can be split, it will be. */
3268 try_split (rtx pat, rtx trial, int last)
3270 rtx before = PREV_INSN (trial);
3271 rtx after = NEXT_INSN (trial);
3272 int has_barrier = 0;
3273 rtx tem;
3274 rtx note, seq;
3275 int probability;
3276 rtx insn_last, insn;
3277 int njumps = 0;
3279 if (any_condjump_p (trial)
3280 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3281 split_branch_probability = INTVAL (XEXP (note, 0));
3282 probability = split_branch_probability;
3284 seq = split_insns (pat, trial);
3286 split_branch_probability = -1;
3288 /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
3289 We may need to handle this specially. */
3290 if (after && GET_CODE (after) == BARRIER)
3292 has_barrier = 1;
3293 after = NEXT_INSN (after);
3296 if (!seq)
3297 return trial;
3299 /* Avoid infinite loop if any insn of the result matches
3300 the original pattern. */
3301 insn_last = seq;
3302 while (1)
3304 if (INSN_P (insn_last)
3305 && rtx_equal_p (PATTERN (insn_last), pat))
3306 return trial;
3307 if (!NEXT_INSN (insn_last))
3308 break;
3309 insn_last = NEXT_INSN (insn_last);
3312 /* Mark labels. */
3313 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3315 if (GET_CODE (insn) == JUMP_INSN)
3317 mark_jump_label (PATTERN (insn), insn, 0);
3318 njumps++;
3319 if (probability != -1
3320 && any_condjump_p (insn)
3321 && !find_reg_note (insn, REG_BR_PROB, 0))
3323 /* We can preserve the REG_BR_PROB notes only if exactly
3324 one jump is created; otherwise the machine description
3325 is responsible for this step, using the
3326 split_branch_probability variable. */
3327 if (njumps != 1)
3328 abort ();
3329 REG_NOTES (insn)
3330 = gen_rtx_EXPR_LIST (REG_BR_PROB,
3331 GEN_INT (probability),
3332 REG_NOTES (insn));
3337 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3338 in SEQ and copy our CALL_INSN_FUNCTION_USAGE to it. */
3339 if (GET_CODE (trial) == CALL_INSN)
3341 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3342 if (GET_CODE (insn) == CALL_INSN)
3344 rtx *p = &CALL_INSN_FUNCTION_USAGE (insn);
3345 while (*p)
3346 p = &XEXP (*p, 1);
3347 *p = CALL_INSN_FUNCTION_USAGE (trial);
3348 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
3352 /* Copy notes, particularly those related to the CFG. */
3353 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3355 switch (REG_NOTE_KIND (note))
3357 case REG_EH_REGION:
3358 insn = insn_last;
3359 while (insn != NULL_RTX)
3361 if (GET_CODE (insn) == CALL_INSN
3362 || (flag_non_call_exceptions
3363 && may_trap_p (PATTERN (insn))))
3364 REG_NOTES (insn)
3365 = gen_rtx_EXPR_LIST (REG_EH_REGION,
3366 XEXP (note, 0),
3367 REG_NOTES (insn));
3368 insn = PREV_INSN (insn);
3370 break;
3372 case REG_NORETURN:
3373 case REG_SETJMP:
3374 case REG_ALWAYS_RETURN:
3375 insn = insn_last;
3376 while (insn != NULL_RTX)
3378 if (GET_CODE (insn) == CALL_INSN)
3379 REG_NOTES (insn)
3380 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3381 XEXP (note, 0),
3382 REG_NOTES (insn));
3383 insn = PREV_INSN (insn);
3385 break;
3387 case REG_NON_LOCAL_GOTO:
3388 insn = insn_last;
3389 while (insn != NULL_RTX)
3391 if (GET_CODE (insn) == JUMP_INSN)
3392 REG_NOTES (insn)
3393 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3394 XEXP (note, 0),
3395 REG_NOTES (insn));
3396 insn = PREV_INSN (insn);
3398 break;
3400 default:
3401 break;
3405 /* If there are LABELS inside the split insns, increment the
3406 usage count so we don't delete the labels. */
3407 if (GET_CODE (trial) == INSN)
3409 insn = insn_last;
3410 while (insn != NULL_RTX)
3412 if (GET_CODE (insn) == INSN)
3413 mark_label_nuses (PATTERN (insn));
3415 insn = PREV_INSN (insn);
3419 tem = emit_insn_after_setloc (seq, trial, INSN_LOCATOR (trial));
3421 delete_insn (trial);
3422 if (has_barrier)
3423 emit_barrier_after (tem);
3425 /* Recursively call try_split for each new insn created; by the
3426 time control returns here that insn will be fully split, so
3427 set LAST and continue from the insn after the one returned.
3428 We can't use next_active_insn here since AFTER may be a note.
3429 Ignore deleted insns, which can occur if not optimizing. */
3430 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3431 if (! INSN_DELETED_P (tem) && INSN_P (tem))
3432 tem = try_split (PATTERN (tem), tem, 1);
3434 /* Return either the first or the last insn, depending on which was
3435 requested. */
3436 return last
3437 ? (after ? PREV_INSN (after) : last_insn)
3438 : NEXT_INSN (before);
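/* Illustrative use, roughly what a split_all_insns-style caller does
   for each insn in the chain:

       rtx last = try_split (PATTERN (insn), insn, 1);

   Since TRIAL is returned unchanged on failure, comparing the result
   with the original insn tells the caller whether a split happened.  */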
3441 /* Make and return an INSN rtx, initializing all its slots.
3442 Store PATTERN in the pattern slots. */
3445 make_insn_raw (rtx pattern)
3447 rtx insn;
3449 insn = rtx_alloc (INSN);
3451 INSN_UID (insn) = cur_insn_uid++;
3452 PATTERN (insn) = pattern;
3453 INSN_CODE (insn) = -1;
3454 LOG_LINKS (insn) = NULL;
3455 REG_NOTES (insn) = NULL;
3456 INSN_LOCATOR (insn) = 0;
3457 BLOCK_FOR_INSN (insn) = NULL;
3459 #ifdef ENABLE_RTL_CHECKING
3460 if (insn
3461 && INSN_P (insn)
3462 && (returnjump_p (insn)
3463 || (GET_CODE (insn) == SET
3464 && SET_DEST (insn) == pc_rtx)))
3466 warning ("ICE: emit_insn used where emit_jump_insn needed:\n");
3467 debug_rtx (insn);
3469 #endif
3471 return insn;
3474 /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
3476 static rtx
3477 make_jump_insn_raw (rtx pattern)
3479 rtx insn;
3481 insn = rtx_alloc (JUMP_INSN);
3482 INSN_UID (insn) = cur_insn_uid++;
3484 PATTERN (insn) = pattern;
3485 INSN_CODE (insn) = -1;
3486 LOG_LINKS (insn) = NULL;
3487 REG_NOTES (insn) = NULL;
3488 JUMP_LABEL (insn) = NULL;
3489 INSN_LOCATOR (insn) = 0;
3490 BLOCK_FOR_INSN (insn) = NULL;
3492 return insn;
3495 /* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
3497 static rtx
3498 make_call_insn_raw (rtx pattern)
3500 rtx insn;
3502 insn = rtx_alloc (CALL_INSN);
3503 INSN_UID (insn) = cur_insn_uid++;
3505 PATTERN (insn) = pattern;
3506 INSN_CODE (insn) = -1;
3507 LOG_LINKS (insn) = NULL;
3508 REG_NOTES (insn) = NULL;
3509 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
3510 INSN_LOCATOR (insn) = 0;
3511 BLOCK_FOR_INSN (insn) = NULL;
3513 return insn;
3516 /* Add INSN to the end of the doubly-linked list.
3517 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
3519 void
3520 add_insn (rtx insn)
3522 PREV_INSN (insn) = last_insn;
3523 NEXT_INSN (insn) = 0;
3525 if (NULL != last_insn)
3526 NEXT_INSN (last_insn) = insn;
3528 if (NULL == first_insn)
3529 first_insn = insn;
3531 last_insn = insn;
3534 /* Add INSN into the doubly-linked list after insn AFTER. This and
3535 the next should be the only functions called to insert an insn once
3536 delay slots have been filled since only they know how to update a
3537 SEQUENCE. */
3539 void
3540 add_insn_after (rtx insn, rtx after)
3542 rtx next = NEXT_INSN (after);
3543 basic_block bb;
3545 if (optimize && INSN_DELETED_P (after))
3546 abort ();
3548 NEXT_INSN (insn) = next;
3549 PREV_INSN (insn) = after;
3551 if (next)
3553 PREV_INSN (next) = insn;
3554 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
3555 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
3557 else if (last_insn == after)
3558 last_insn = insn;
3559 else
3561 struct sequence_stack *stack = seq_stack;
3562 /* Scan all pending sequences too. */
3563 for (; stack; stack = stack->next)
3564 if (after == stack->last)
3566 stack->last = insn;
3567 break;
3570 if (stack == 0)
3571 abort ();
3574 if (GET_CODE (after) != BARRIER
3575 && GET_CODE (insn) != BARRIER
3576 && (bb = BLOCK_FOR_INSN (after)))
3578 set_block_for_insn (insn, bb);
3579 if (INSN_P (insn))
3580 bb->flags |= BB_DIRTY;
3581 /* This should not happen, as the first insn in the BB is always
3582 either a NOTE or a LABEL. */
3583 if (bb->end == after
3584 /* Avoid clobbering of structure when creating new BB. */
3585 && GET_CODE (insn) != BARRIER
3586 && (GET_CODE (insn) != NOTE
3587 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
3588 bb->end = insn;
3591 NEXT_INSN (after) = insn;
3592 if (GET_CODE (after) == INSN && GET_CODE (PATTERN (after)) == SEQUENCE)
3594 rtx sequence = PATTERN (after);
3595 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3599 /* Add INSN into the doubly-linked list before insn BEFORE. This and
3600 the previous should be the only functions called to insert an insn once
3601 delay slots have been filled since only they know how to update a
3602 SEQUENCE. */
3604 void
3605 add_insn_before (rtx insn, rtx before)
3607 rtx prev = PREV_INSN (before);
3608 basic_block bb;
3610 if (optimize && INSN_DELETED_P (before))
3611 abort ();
3613 PREV_INSN (insn) = prev;
3614 NEXT_INSN (insn) = before;
3616 if (prev)
3618 NEXT_INSN (prev) = insn;
3619 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
3621 rtx sequence = PATTERN (prev);
3622 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3625 else if (first_insn == before)
3626 first_insn = insn;
3627 else
3629 struct sequence_stack *stack = seq_stack;
3630 /* Scan all pending sequences too. */
3631 for (; stack; stack = stack->next)
3632 if (before == stack->first)
3634 stack->first = insn;
3635 break;
3638 if (stack == 0)
3639 abort ();
3642 if (GET_CODE (before) != BARRIER
3643 && GET_CODE (insn) != BARRIER
3644 && (bb = BLOCK_FOR_INSN (before)))
3646 set_block_for_insn (insn, bb);
3647 if (INSN_P (insn))
3648 bb->flags |= BB_DIRTY;
3649 /* This should not happen, as the first insn in the BB is always
3650 either a NOTE or a LABEL. */
3651 if (bb->head == insn
3652 /* Avoid clobbering of structure when creating new BB. */
3653 && GET_CODE (insn) != BARRIER
3654 && (GET_CODE (insn) != NOTE
3655 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
3656 abort ();
3659 PREV_INSN (before) = insn;
3660 if (GET_CODE (before) == INSN && GET_CODE (PATTERN (before)) == SEQUENCE)
3661 PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
3664 /* Remove an insn from its doubly-linked list. This function knows how
3665 to handle sequences. */
3666 void
3667 remove_insn (rtx insn)
3669 rtx next = NEXT_INSN (insn);
3670 rtx prev = PREV_INSN (insn);
3671 basic_block bb;
3673 if (prev)
3675 NEXT_INSN (prev) = next;
3676 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
3678 rtx sequence = PATTERN (prev);
3679 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
3682 else if (first_insn == insn)
3683 first_insn = next;
3684 else
3686 struct sequence_stack *stack = seq_stack;
3687 /* Scan all pending sequences too. */
3688 for (; stack; stack = stack->next)
3689 if (insn == stack->first)
3691 stack->first = next;
3692 break;
3695 if (stack == 0)
3696 abort ();
3699 if (next)
3701 PREV_INSN (next) = prev;
3702 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
3703 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
3705 else if (last_insn == insn)
3706 last_insn = prev;
3707 else
3709 struct sequence_stack *stack = seq_stack;
3710 /* Scan all pending sequences too. */
3711 for (; stack; stack = stack->next)
3712 if (insn == stack->last)
3714 stack->last = prev;
3715 break;
3718 if (stack == 0)
3719 abort ();
3721 if (GET_CODE (insn) != BARRIER
3722 && (bb = BLOCK_FOR_INSN (insn)))
3724 if (INSN_P (insn))
3725 bb->flags |= BB_DIRTY;
3726 if (bb->head == insn)
3728 /* Never ever delete the basic block note without deleting the whole
3729 basic block. */
3730 if (GET_CODE (insn) == NOTE)
3731 abort ();
3732 bb->head = next;
3734 if (bb->end == insn)
3735 bb->end = prev;
3739 /* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
3741 void
3742 add_function_usage_to (rtx call_insn, rtx call_fusage)
3744 if (! call_insn || GET_CODE (call_insn) != CALL_INSN)
3745 abort ();
3747 /* Put the register usage information on the CALL. If there is already
3748 some usage information, put ours at the end. */
3749 if (CALL_INSN_FUNCTION_USAGE (call_insn))
3751 rtx link;
3753 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
3754 link = XEXP (link, 1))
3757 XEXP (link, 1) = call_fusage;
3759 else
3760 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
3763 /* Delete all insns made since FROM.
3764 FROM becomes the new last instruction. */
3766 void
3767 delete_insns_since (rtx from)
3769 if (from == 0)
3770 first_insn = 0;
3771 else
3772 NEXT_INSN (from) = 0;
3773 last_insn = from;
3776 /* This function is deprecated; please use sequences instead.
3778 Move a consecutive bunch of insns to a different place in the chain.
3779 The insns to be moved are those between FROM and TO.
3780 They are moved to a new position after the insn AFTER.
3781 AFTER must not be FROM or TO or any insn in between.
3783 This function does not know about SEQUENCEs and hence should not be
3784 called after delay-slot filling has been done. */
3786 void
3787 reorder_insns_nobb (rtx from, rtx to, rtx after)
3789 /* Splice this bunch out of where it is now. */
3790 if (PREV_INSN (from))
3791 NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
3792 if (NEXT_INSN (to))
3793 PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
3794 if (last_insn == to)
3795 last_insn = PREV_INSN (from);
3796 if (first_insn == from)
3797 first_insn = NEXT_INSN (to);
3799 /* Make the new neighbors point to it and it to them. */
3800 if (NEXT_INSN (after))
3801 PREV_INSN (NEXT_INSN (after)) = to;
3803 NEXT_INSN (to) = NEXT_INSN (after);
3804 PREV_INSN (from) = after;
3805 NEXT_INSN (after) = from;
3806 if (after == last_insn)
3807 last_insn = to;
3810 /* Same as function above, but take care to update BB boundaries. */
3811 void
3812 reorder_insns (rtx from, rtx to, rtx after)
3814 rtx prev = PREV_INSN (from);
3815 basic_block bb, bb2;
3817 reorder_insns_nobb (from, to, after);
3819 if (GET_CODE (after) != BARRIER
3820 && (bb = BLOCK_FOR_INSN (after)))
3822 rtx x;
3823 bb->flags |= BB_DIRTY;
3825 if (GET_CODE (from) != BARRIER
3826 && (bb2 = BLOCK_FOR_INSN (from)))
3828 if (bb2->end == to)
3829 bb2->end = prev;
3830 bb2->flags |= BB_DIRTY;
3833 if (bb->end == after)
3834 bb->end = to;
3836 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
3837 set_block_for_insn (x, bb);
3841 /* Return the line note insn preceding INSN. */
3843 static rtx
3844 find_line_note (rtx insn)
3846 if (no_line_numbers)
3847 return 0;
3849 for (; insn; insn = PREV_INSN (insn))
3850 if (GET_CODE (insn) == NOTE
3851 && NOTE_LINE_NUMBER (insn) >= 0)
3852 break;
3854 return insn;
3857 /* Like reorder_insns, but inserts line notes to preserve the line numbers
3858 of the moved insns when debugging. This may insert a note between AFTER
3859 and FROM, and another one after TO. */
3861 void
3862 reorder_insns_with_line_notes (rtx from, rtx to, rtx after)
3864 rtx from_line = find_line_note (from);
3865 rtx after_line = find_line_note (after);
3867 reorder_insns (from, to, after);
3869 if (from_line == after_line)
3870 return;
3872 if (from_line)
3873 emit_note_copy_after (from_line, after);
3874 if (after_line)
3875 emit_note_copy_after (after_line, to);
3878 /* Remove unnecessary notes from the instruction stream. */
3880 void
3881 remove_unnecessary_notes (void)
3883 rtx block_stack = NULL_RTX;
3884 rtx eh_stack = NULL_RTX;
3885 rtx insn;
3886 rtx next;
3887 rtx tmp;
3889 /* We must not remove the first instruction in the function because
3890 the compiler depends on the first instruction being a note. */
3891 for (insn = NEXT_INSN (get_insns ()); insn; insn = next)
3893 /* Remember what's next. */
3894 next = NEXT_INSN (insn);
3896 /* We're only interested in notes. */
3897 if (GET_CODE (insn) != NOTE)
3898 continue;
3900 switch (NOTE_LINE_NUMBER (insn))
3902 case NOTE_INSN_DELETED:
3903 case NOTE_INSN_LOOP_END_TOP_COND:
3904 remove_insn (insn);
3905 break;
3907 case NOTE_INSN_EH_REGION_BEG:
3908 eh_stack = alloc_INSN_LIST (insn, eh_stack);
3909 break;
3911 case NOTE_INSN_EH_REGION_END:
3912 /* Too many end notes. */
3913 if (eh_stack == NULL_RTX)
3914 abort ();
3915 /* Mismatched nesting. */
3916 if (NOTE_EH_HANDLER (XEXP (eh_stack, 0)) != NOTE_EH_HANDLER (insn))
3917 abort ();
3918 tmp = eh_stack;
3919 eh_stack = XEXP (eh_stack, 1);
3920 free_INSN_LIST_node (tmp);
3921 break;
3923 case NOTE_INSN_BLOCK_BEG:
3924 /* By now, all notes indicating lexical blocks should have
3925 NOTE_BLOCK filled in. */
3926 if (NOTE_BLOCK (insn) == NULL_TREE)
3927 abort ();
3928 block_stack = alloc_INSN_LIST (insn, block_stack);
3929 break;
3931 case NOTE_INSN_BLOCK_END:
3932 /* Too many end notes. */
3933 if (block_stack == NULL_RTX)
3934 abort ();
3935 /* Mismatched nesting. */
3936 if (NOTE_BLOCK (XEXP (block_stack, 0)) != NOTE_BLOCK (insn))
3937 abort ();
3938 tmp = block_stack;
3939 block_stack = XEXP (block_stack, 1);
3940 free_INSN_LIST_node (tmp);
3942 /* Scan back to see if there are any non-note instructions
3943 between INSN and the beginning of this block. If not,
3944 then there is no PC range in the generated code that will
3945 actually be in this block, so there's no point in
3946 remembering the existence of the block. */
3947 for (tmp = PREV_INSN (insn); tmp; tmp = PREV_INSN (tmp))
3949 /* This block contains a real instruction. Note that we
3950 don't include labels; if the only thing in the block
3951 is a label, then there are still no PC values that
3952 lie within the block. */
3953 if (INSN_P (tmp))
3954 break;
3956 /* We're only interested in NOTEs. */
3957 if (GET_CODE (tmp) != NOTE)
3958 continue;
3960 if (NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BLOCK_BEG)
3962 /* We just verified that this BLOCK matches us with
3963 the block_stack check above. Never delete the
3964 BLOCK for the outermost scope of the function; we
3965 can refer to names from that scope even if the
3966 block notes are messed up. */
3967 if (! is_body_block (NOTE_BLOCK (insn))
3968 && (*debug_hooks->ignore_block) (NOTE_BLOCK (insn)))
3970 remove_insn (tmp);
3971 remove_insn (insn);
3973 break;
3975 else if (NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BLOCK_END)
3976 /* There's a nested block. We need to leave the
3977 current block in place since otherwise the debugger
3978 wouldn't be able to show symbols from our block in
3979 the nested block. */
3980 break;
3985 /* Too many begin notes. */
3986 if (block_stack || eh_stack)
3987 abort ();
3991 /* Emit insn(s) of given code and pattern
3992 at a specified place within the doubly-linked list.
3994 All of the emit_foo global entry points accept an object
3995 X which is either an insn list or a PATTERN of a single
3996 instruction.
3998 There are thus a few canonical ways to generate code and
3999 emit it at a specific place in the instruction stream. For
4000 example, consider the instruction named SPOT and the fact that
4001 we would like to emit some instructions before SPOT. We might
4002 do it like this:
4004 start_sequence ();
4005 ... emit the new instructions ...
4006 insns_head = get_insns ();
4007 end_sequence ();
4009 emit_insn_before (insns_head, SPOT);
4011 It used to be common to generate SEQUENCE rtl instead, but that
4012 is a relic of the past which no longer occurs. The reason is that
4013 SEQUENCE rtl results in badly fragmented RTL memory, since the SEQUENCE
4014 generated would almost certainly die right after it was created. */
4016 /* Make X be output before the instruction BEFORE. */
4019 emit_insn_before (rtx x, rtx before)
4021 rtx last = before;
4022 rtx insn;
4024 #ifdef ENABLE_RTL_CHECKING
4025 if (before == NULL_RTX)
4026 abort ();
4027 #endif
4029 if (x == NULL_RTX)
4030 return last;
4032 switch (GET_CODE (x))
4034 case INSN:
4035 case JUMP_INSN:
4036 case CALL_INSN:
4037 case CODE_LABEL:
4038 case BARRIER:
4039 case NOTE:
4040 insn = x;
4041 while (insn)
4043 rtx next = NEXT_INSN (insn);
4044 add_insn_before (insn, before);
4045 last = insn;
4046 insn = next;
4048 break;
4050 #ifdef ENABLE_RTL_CHECKING
4051 case SEQUENCE:
4052 abort ();
4053 break;
4054 #endif
4056 default:
4057 last = make_insn_raw (x);
4058 add_insn_before (last, before);
4059 break;
4062 return last;
4065 /* Make an instruction with body X and code JUMP_INSN
4066 and output it before the instruction BEFORE. */
4069 emit_jump_insn_before (rtx x, rtx before)
4071 rtx insn, last = NULL_RTX;
4073 #ifdef ENABLE_RTL_CHECKING
4074 if (before == NULL_RTX)
4075 abort ();
4076 #endif
4078 switch (GET_CODE (x))
4080 case INSN:
4081 case JUMP_INSN:
4082 case CALL_INSN:
4083 case CODE_LABEL:
4084 case BARRIER:
4085 case NOTE:
4086 insn = x;
4087 while (insn)
4089 rtx next = NEXT_INSN (insn);
4090 add_insn_before (insn, before);
4091 last = insn;
4092 insn = next;
4094 break;
4096 #ifdef ENABLE_RTL_CHECKING
4097 case SEQUENCE:
4098 abort ();
4099 break;
4100 #endif
4102 default:
4103 last = make_jump_insn_raw (x);
4104 add_insn_before (last, before);
4105 break;
4108 return last;
4111 /* Make an instruction with body X and code CALL_INSN
4112 and output it before the instruction BEFORE. */
4114 rtx
4115 emit_call_insn_before (rtx x, rtx before)
4117 rtx last = NULL_RTX, insn;
4119 #ifdef ENABLE_RTL_CHECKING
4120 if (before == NULL_RTX)
4121 abort ();
4122 #endif
4124 switch (GET_CODE (x))
4126 case INSN:
4127 case JUMP_INSN:
4128 case CALL_INSN:
4129 case CODE_LABEL:
4130 case BARRIER:
4131 case NOTE:
4132 insn = x;
4133 while (insn)
4135 rtx next = NEXT_INSN (insn);
4136 add_insn_before (insn, before);
4137 last = insn;
4138 insn = next;
4140 break;
4142 #ifdef ENABLE_RTL_CHECKING
4143 case SEQUENCE:
4144 abort ();
4145 break;
4146 #endif
4148 default:
4149 last = make_call_insn_raw (x);
4150 add_insn_before (last, before);
4151 break;
4154 return last;
4157 /* Make an insn of code BARRIER
4158 and output it before the insn BEFORE. */
4160 rtx
4161 emit_barrier_before (rtx before)
4163 rtx insn = rtx_alloc (BARRIER);
4165 INSN_UID (insn) = cur_insn_uid++;
4167 add_insn_before (insn, before);
4168 return insn;
4171 /* Emit the label LABEL before the insn BEFORE. */
4173 rtx
4174 emit_label_before (rtx label, rtx before)
4176 /* This can be called twice for the same label as a result of the
4177 confusion that follows a syntax error! So make it harmless. */
4178 if (INSN_UID (label) == 0)
4180 INSN_UID (label) = cur_insn_uid++;
4181 add_insn_before (label, before);
4184 return label;
4187 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4189 rtx
4190 emit_note_before (int subtype, rtx before)
4192 rtx note = rtx_alloc (NOTE);
4193 INSN_UID (note) = cur_insn_uid++;
4194 NOTE_SOURCE_FILE (note) = 0;
4195 NOTE_LINE_NUMBER (note) = subtype;
4196 BLOCK_FOR_INSN (note) = NULL;
4198 add_insn_before (note, before);
4199 return note;
4202 /* Helper for emit_insn_after, handles lists of instructions
4203 efficiently. */
4205 static rtx emit_insn_after_1 (rtx, rtx);
4207 static rtx
4208 emit_insn_after_1 (rtx first, rtx after)
4210 rtx last;
4211 rtx after_after;
4212 basic_block bb;
4214 if (GET_CODE (after) != BARRIER
4215 && (bb = BLOCK_FOR_INSN (after)))
4217 bb->flags |= BB_DIRTY;
4218 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4219 if (GET_CODE (last) != BARRIER)
4220 set_block_for_insn (last, bb);
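/* The loop above deliberately stops before the final insn of the
   list; the test below gives that last insn its block too.  */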
4221 if (GET_CODE (last) != BARRIER)
4222 set_block_for_insn (last, bb);
4223 if (bb->end == after)
4224 bb->end = last;
4226 else
4227 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4228 continue;
4230 after_after = NEXT_INSN (after);
4232 NEXT_INSN (after) = first;
4233 PREV_INSN (first) = after;
4234 NEXT_INSN (last) = after_after;
4235 if (after_after)
4236 PREV_INSN (after_after) = last;
4238 if (after == last_insn)
4239 last_insn = last;
4240 return last;
4243 /* Make X be output after the insn AFTER. */
4245 rtx
4246 emit_insn_after (rtx x, rtx after)
4248 rtx last = after;
4250 #ifdef ENABLE_RTL_CHECKING
4251 if (after == NULL_RTX)
4252 abort ();
4253 #endif
4255 if (x == NULL_RTX)
4256 return last;
4258 switch (GET_CODE (x))
4260 case INSN:
4261 case JUMP_INSN:
4262 case CALL_INSN:
4263 case CODE_LABEL:
4264 case BARRIER:
4265 case NOTE:
4266 last = emit_insn_after_1 (x, after);
4267 break;
4269 #ifdef ENABLE_RTL_CHECKING
4270 case SEQUENCE:
4271 abort ();
4272 break;
4273 #endif
4275 default:
4276 last = make_insn_raw (x);
4277 add_insn_after (last, after);
4278 break;
4281 return last;
4284 /* Similar to emit_insn_after, except that line notes are to be inserted so
4285 as to act as if this insn were at FROM. */
4287 void
4288 emit_insn_after_with_line_notes (rtx x, rtx after, rtx from)
4290 rtx from_line = find_line_note (from);
4291 rtx after_line = find_line_note (after);
4292 rtx insn = emit_insn_after (x, after);
4294 if (from_line)
4295 emit_note_copy_after (from_line, after);
4297 if (after_line)
4298 emit_note_copy_after (after_line, insn);
4301 /* Make an insn of code JUMP_INSN with body X
4302 and output it after the insn AFTER. */
4304 rtx
4305 emit_jump_insn_after (rtx x, rtx after)
4307 rtx last;
4309 #ifdef ENABLE_RTL_CHECKING
4310 if (after == NULL_RTX)
4311 abort ();
4312 #endif
4314 switch (GET_CODE (x))
4316 case INSN:
4317 case JUMP_INSN:
4318 case CALL_INSN:
4319 case CODE_LABEL:
4320 case BARRIER:
4321 case NOTE:
4322 last = emit_insn_after_1 (x, after);
4323 break;
4325 #ifdef ENABLE_RTL_CHECKING
4326 case SEQUENCE:
4327 abort ();
4328 break;
4329 #endif
4331 default:
4332 last = make_jump_insn_raw (x);
4333 add_insn_after (last, after);
4334 break;
4337 return last;
4340 /* Make an instruction with body X and code CALL_INSN
4341 and output it after the instruction AFTER. */
4343 rtx
4344 emit_call_insn_after (rtx x, rtx after)
4346 rtx last;
4348 #ifdef ENABLE_RTL_CHECKING
4349 if (after == NULL_RTX)
4350 abort ();
4351 #endif
4353 switch (GET_CODE (x))
4355 case INSN:
4356 case JUMP_INSN:
4357 case CALL_INSN:
4358 case CODE_LABEL:
4359 case BARRIER:
4360 case NOTE:
4361 last = emit_insn_after_1 (x, after);
4362 break;
4364 #ifdef ENABLE_RTL_CHECKING
4365 case SEQUENCE:
4366 abort ();
4367 break;
4368 #endif
4370 default:
4371 last = make_call_insn_raw (x);
4372 add_insn_after (last, after);
4373 break;
4376 return last;
4379 /* Make an insn of code BARRIER
4380 and output it after the insn AFTER. */
4382 rtx
4383 emit_barrier_after (rtx after)
4385 rtx insn = rtx_alloc (BARRIER);
4387 INSN_UID (insn) = cur_insn_uid++;
4389 add_insn_after (insn, after);
4390 return insn;
4393 /* Emit the label LABEL after the insn AFTER. */
4395 rtx
4396 emit_label_after (rtx label, rtx after)
4398 /* This can be called twice for the same label
4399 as a result of the confusion that follows a syntax error!
4400 So make it harmless. */
4401 if (INSN_UID (label) == 0)
4403 INSN_UID (label) = cur_insn_uid++;
4404 add_insn_after (label, after);
4407 return label;
4410 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
4412 rtx
4413 emit_note_after (int subtype, rtx after)
4415 rtx note = rtx_alloc (NOTE);
4416 INSN_UID (note) = cur_insn_uid++;
4417 NOTE_SOURCE_FILE (note) = 0;
4418 NOTE_LINE_NUMBER (note) = subtype;
4419 BLOCK_FOR_INSN (note) = NULL;
4420 add_insn_after (note, after);
4421 return note;
4424 /* Emit a copy of note ORIG after the insn AFTER. */
4426 rtx
4427 emit_note_copy_after (rtx orig, rtx after)
4429 rtx note;
4431 if (NOTE_LINE_NUMBER (orig) >= 0 && no_line_numbers)
4433 cur_insn_uid++;
4434 return 0;
4437 note = rtx_alloc (NOTE);
4438 INSN_UID (note) = cur_insn_uid++;
4439 NOTE_LINE_NUMBER (note) = NOTE_LINE_NUMBER (orig);
4440 NOTE_DATA (note) = NOTE_DATA (orig);
4441 BLOCK_FOR_INSN (note) = NULL;
4442 add_insn_after (note, after);
4443 return note;
4446 /* Like emit_insn_after, but set INSN_LOCATOR according to LOC. */
4447 rtx
4448 emit_insn_after_setloc (rtx pattern, rtx after, int loc)
4450 rtx last = emit_insn_after (pattern, after);
4452 after = NEXT_INSN (after);
4453 while (1)
4455 if (active_insn_p (after))
4456 INSN_LOCATOR (after) = loc;
4457 if (after == last)
4458 break;
4459 after = NEXT_INSN (after);
4461 return last;
4464 /* Like emit_jump_insn_after, but set INSN_LOCATOR according to LOC. */
4465 rtx
4466 emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
4468 rtx last = emit_jump_insn_after (pattern, after);
4470 after = NEXT_INSN (after);
4471 while (1)
4473 if (active_insn_p (after))
4474 INSN_LOCATOR (after) = loc;
4475 if (after == last)
4476 break;
4477 after = NEXT_INSN (after);
4479 return last;
4482 /* Like emit_call_insn_after, but set INSN_LOCATOR according to LOC. */
4483 rtx
4484 emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
4486 rtx last = emit_call_insn_after (pattern, after);
4488 after = NEXT_INSN (after);
4489 while (1)
4491 if (active_insn_p (after))
4492 INSN_LOCATOR (after) = loc;
4493 if (after == last)
4494 break;
4495 after = NEXT_INSN (after);
4497 return last;
4500 /* Like emit_insn_before, but set INSN_LOCATOR according to LOC. */
4501 rtx
4502 emit_insn_before_setloc (rtx pattern, rtx before, int loc)
4504 rtx first = PREV_INSN (before);
4505 rtx last = emit_insn_before (pattern, before);
4507 first = NEXT_INSN (first);
4508 while (1)
4510 if (active_insn_p (first))
4511 INSN_LOCATOR (first) = loc;
4512 if (first == last)
4513 break;
4514 first = NEXT_INSN (first);
4516 return last;
4519 /* Take X and emit it at the end of the doubly-linked
4520 INSN list.
4522 Returns the last insn emitted. */
4524 rtx
4525 emit_insn (rtx x)
4527 rtx last = last_insn;
4528 rtx insn;
4530 if (x == NULL_RTX)
4531 return last;
4533 switch (GET_CODE (x))
4535 case INSN:
4536 case JUMP_INSN:
4537 case CALL_INSN:
4538 case CODE_LABEL:
4539 case BARRIER:
4540 case NOTE:
4541 insn = x;
4542 while (insn)
4544 rtx next = NEXT_INSN (insn);
4545 add_insn (insn);
4546 last = insn;
4547 insn = next;
4549 break;
4551 #ifdef ENABLE_RTL_CHECKING
4552 case SEQUENCE:
4553 abort ();
4554 break;
4555 #endif
4557 default:
4558 last = make_insn_raw (x);
4559 add_insn (last);
4560 break;
4563 return last;
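/* For example (illustrative only; DEST and SRC stand for operands
   owned by the caller):

     emit_insn (gen_rtx_SET (VOIDmode, dest, src));

   wraps the SET in a fresh INSN and appends it, whereas passing an
   insn list (e.g. the result of get_insns) links each insn of that
   list onto the end of the chain unchanged.  */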
4566 /* Make an insn of code JUMP_INSN with pattern X
4567 and add it to the end of the doubly-linked list. */
4569 rtx
4570 emit_jump_insn (rtx x)
4572 rtx last = NULL_RTX, insn;
4574 switch (GET_CODE (x))
4576 case INSN:
4577 case JUMP_INSN:
4578 case CALL_INSN:
4579 case CODE_LABEL:
4580 case BARRIER:
4581 case NOTE:
4582 insn = x;
4583 while (insn)
4585 rtx next = NEXT_INSN (insn);
4586 add_insn (insn);
4587 last = insn;
4588 insn = next;
4590 break;
4592 #ifdef ENABLE_RTL_CHECKING
4593 case SEQUENCE:
4594 abort ();
4595 break;
4596 #endif
4598 default:
4599 last = make_jump_insn_raw (x);
4600 add_insn (last);
4601 break;
4604 return last;
4607 /* Make an insn of code CALL_INSN with pattern X
4608 and add it to the end of the doubly-linked list. */
4610 rtx
4611 emit_call_insn (rtx x)
4613 rtx insn;
4615 switch (GET_CODE (x))
4617 case INSN:
4618 case JUMP_INSN:
4619 case CALL_INSN:
4620 case CODE_LABEL:
4621 case BARRIER:
4622 case NOTE:
4623 insn = emit_insn (x);
4624 break;
4626 #ifdef ENABLE_RTL_CHECKING
4627 case SEQUENCE:
4628 abort ();
4629 break;
4630 #endif
4632 default:
4633 insn = make_call_insn_raw (x);
4634 add_insn (insn);
4635 break;
4638 return insn;
4641 /* Add the label LABEL to the end of the doubly-linked list. */
4643 rtx
4644 emit_label (rtx label)
4646 /* This can be called twice for the same label
4647 as a result of the confusion that follows a syntax error!
4648 So make it harmless. */
4649 if (INSN_UID (label) == 0)
4651 INSN_UID (label) = cur_insn_uid++;
4652 add_insn (label);
4654 return label;
4657 /* Make an insn of code BARRIER
4658 and add it to the end of the doubly-linked list. */
4660 rtx
4661 emit_barrier (void)
4663 rtx barrier = rtx_alloc (BARRIER);
4664 INSN_UID (barrier) = cur_insn_uid++;
4665 add_insn (barrier);
4666 return barrier;
4669 /* Make a line-number NOTE insn for LOCATION and add it to the end
4670 of the doubly-linked list, but only if line numbers are desired for
4671 debugging info and it doesn't match the previous one. */
4673 rtx
4674 emit_line_note (location_t location)
4676 rtx note;
4678 set_file_and_line_for_stmt (location);
4680 if (location.file && last_location.file
4681 && !strcmp (location.file, last_location.file)
4682 && location.line == last_location.line)
4683 return NULL_RTX;
4684 last_location = location;
4686 if (no_line_numbers)
4688 cur_insn_uid++;
4689 return NULL_RTX;
4692 note = emit_note (location.line);
4693 NOTE_SOURCE_FILE (note) = location.file;
4695 return note;
4698 /* Emit a copy of note ORIG. */
4700 rtx
4701 emit_note_copy (rtx orig)
4703 rtx note;
4705 if (NOTE_LINE_NUMBER (orig) >= 0 && no_line_numbers)
4707 cur_insn_uid++;
4708 return NULL_RTX;
4711 note = rtx_alloc (NOTE);
4713 INSN_UID (note) = cur_insn_uid++;
4714 NOTE_DATA (note) = NOTE_DATA (orig);
4715 NOTE_LINE_NUMBER (note) = NOTE_LINE_NUMBER (orig);
4716 BLOCK_FOR_INSN (note) = NULL;
4717 add_insn (note);
4719 return note;
4722 /* Make an insn of code NOTE with subtype NOTE_NO
4723 and add it to the end of the doubly-linked list. */
4725 rtx
4726 emit_note (int note_no)
4728 rtx note;
4730 note = rtx_alloc (NOTE);
4731 INSN_UID (note) = cur_insn_uid++;
4732 NOTE_LINE_NUMBER (note) = note_no;
4733 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
4734 BLOCK_FOR_INSN (note) = NULL;
4735 add_insn (note);
4736 return note;
4739 /* Cause next statement to emit a line note even if the line number
4740 has not changed. */
4742 void
4743 force_next_line_note (void)
4745 last_location.line = -1;
4748 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
4749 note of this type already exists, remove it first. */
4751 rtx
4752 set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
4754 rtx note = find_reg_note (insn, kind, NULL_RTX);
4756 switch (kind)
4758 case REG_EQUAL:
4759 case REG_EQUIV:
4760 /* Don't add REG_EQUAL/REG_EQUIV notes if the insn
4761 has multiple sets (some callers assume single_set
4762 means the insn only has one set, when in fact it
4763 means the insn only has one *useful* set). */
4764 if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
4766 if (note)
4767 abort ();
4768 return NULL_RTX;
4771 /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
4772 It serves no useful purpose and breaks eliminate_regs. */
4773 if (GET_CODE (datum) == ASM_OPERANDS)
4774 return NULL_RTX;
4775 break;
4777 default:
4778 break;
4781 if (note)
4783 XEXP (note, 0) = datum;
4784 return note;
4787 REG_NOTES (insn) = gen_rtx_EXPR_LIST (kind, datum, REG_NOTES (insn));
4788 return REG_NOTES (insn);
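/* A typical use (illustrative sketch): record that a single-set
   insn computes a known constant, without ever creating a second
   REG_EQUAL note on it:

     set_unique_reg_note (insn, REG_EQUAL, GEN_INT (42));  */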
4791 /* Return an indication of which type of insn should have X as a body.
4792 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
4794 enum rtx_code
4795 classify_insn (rtx x)
4797 if (GET_CODE (x) == CODE_LABEL)
4798 return CODE_LABEL;
4799 if (GET_CODE (x) == CALL)
4800 return CALL_INSN;
4801 if (GET_CODE (x) == RETURN)
4802 return JUMP_INSN;
4803 if (GET_CODE (x) == SET)
4805 if (SET_DEST (x) == pc_rtx)
4806 return JUMP_INSN;
4807 else if (GET_CODE (SET_SRC (x)) == CALL)
4808 return CALL_INSN;
4809 else
4810 return INSN;
4812 if (GET_CODE (x) == PARALLEL)
4814 int j;
4815 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
4816 if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
4817 return CALL_INSN;
4818 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
4819 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
4820 return JUMP_INSN;
4821 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
4822 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
4823 return CALL_INSN;
4825 return INSN;
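/* Thus (set (pc) (label_ref ...)) classifies as JUMP_INSN,
   (call ...) and (set (reg) (call ...)) as CALL_INSN, and an
   ordinary (set (reg) (plus ...)) as INSN; emit, below, relies on
   exactly this classification.  */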
4828 /* Emit the rtl pattern X as an appropriate kind of insn.
4829 If X is a label, it is simply added into the insn chain. */
4831 rtx
4832 emit (rtx x)
4834 enum rtx_code code = classify_insn (x);
4836 if (code == CODE_LABEL)
4837 return emit_label (x);
4838 else if (code == INSN)
4839 return emit_insn (x);
4840 else if (code == JUMP_INSN)
4842 rtx insn = emit_jump_insn (x);
4843 if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
4844 return emit_barrier ();
4845 return insn;
4847 else if (code == CALL_INSN)
4848 return emit_call_insn (x);
4849 else
4850 abort ();
4853 /* Space for free sequence stack entries. */
4854 static GTY ((deletable (""))) struct sequence_stack *free_sequence_stack;
4856 /* Begin emitting insns to a sequence which can be packaged in an
4857 RTL_EXPR. If this sequence will contain something that might cause
4858 the compiler to pop arguments to function calls (because those
4859 pops have previously been deferred; see INHIBIT_DEFER_POP for more
4860 details), use do_pending_stack_adjust before calling this function.
4861 That will ensure that the deferred pops are not accidentally
4862 emitted in the middle of this sequence. */
4864 void
4865 start_sequence (void)
4867 struct sequence_stack *tem;
4869 if (free_sequence_stack != NULL)
4871 tem = free_sequence_stack;
4872 free_sequence_stack = tem->next;
4874 else
4875 tem = ggc_alloc (sizeof (struct sequence_stack));
4877 tem->next = seq_stack;
4878 tem->first = first_insn;
4879 tem->last = last_insn;
4880 tem->sequence_rtl_expr = seq_rtl_expr;
4882 seq_stack = tem;
4884 first_insn = 0;
4885 last_insn = 0;
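/* An illustrative caller that may have deferred argument pops
   pending would flush them first, per the comment above:

     do_pending_stack_adjust ();
     start_sequence ();
     ... emit insns that must not absorb the deferred pops ...
     seq = get_insns ();
     end_sequence ();  */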
4888 /* Similarly, but indicate that this sequence will be placed in T, an
4889 RTL_EXPR. See the documentation for start_sequence for more
4890 information about how to use this function. */
4892 void
4893 start_sequence_for_rtl_expr (tree t)
4895 start_sequence ();
4897 seq_rtl_expr = t;
4900 /* Set up the insn chain starting with FIRST as the current sequence,
4901 saving the previously current one. See the documentation for
4902 start_sequence for more information about how to use this function. */
4904 void
4905 push_to_sequence (rtx first)
4907 rtx last;
4909 start_sequence ();
4911 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last));
4913 first_insn = first;
4914 last_insn = last;
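/* E.g. (sketch only; CHAIN and PAT are hypothetical locals of the
   caller): to append to a previously saved chain and retrieve the
   extended result:

     push_to_sequence (chain);
     emit_insn (pat);
     chain = get_insns ();
     end_sequence ();  */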
4917 /* Set up the insn chain from a chain starting in FIRST to LAST. */
4919 void
4920 push_to_full_sequence (rtx first, rtx last)
4922 start_sequence ();
4923 first_insn = first;
4924 last_insn = last;
4925 /* We really should have the end of the insn chain here. */
4926 if (last && NEXT_INSN (last))
4927 abort ();
4930 /* Set up the outer-level insn chain
4931 as the current sequence, saving the previously current one. */
4933 void
4934 push_topmost_sequence (void)
4936 struct sequence_stack *stack, *top = NULL;
4938 start_sequence ();
4940 for (stack = seq_stack; stack; stack = stack->next)
4941 top = stack;
4943 first_insn = top->first;
4944 last_insn = top->last;
4945 seq_rtl_expr = top->sequence_rtl_expr;
4948 /* After emitting to the outer-level insn chain, update the outer-level
4949 insn chain, and restore the previous saved state. */
4951 void
4952 pop_topmost_sequence (void)
4954 struct sequence_stack *stack, *top = NULL;
4956 for (stack = seq_stack; stack; stack = stack->next)
4957 top = stack;
4959 top->first = first_insn;
4960 top->last = last_insn;
4961 /* ??? Why don't we save seq_rtl_expr here? */
4963 end_sequence ();
4966 /* After emitting to a sequence, restore previous saved state.
4968 To get the contents of the sequence just made, you must call
4969 `get_insns' *before* calling here.
4971 If the compiler might have deferred popping arguments while
4972 generating this sequence, and this sequence will not be immediately
4973 inserted into the instruction stream, use do_pending_stack_adjust
4974 before calling get_insns. That will ensure that the deferred
4975 pops are inserted into this sequence, and not into some random
4976 location in the instruction stream. See INHIBIT_DEFER_POP for more
4977 information about deferred popping of arguments. */
4979 void
4980 end_sequence (void)
4982 struct sequence_stack *tem = seq_stack;
4984 first_insn = tem->first;
4985 last_insn = tem->last;
4986 seq_rtl_expr = tem->sequence_rtl_expr;
4987 seq_stack = tem->next;
4989 memset (tem, 0, sizeof (*tem));
4990 tem->next = free_sequence_stack;
4991 free_sequence_stack = tem;
4994 /* This works like end_sequence, but records the old sequence in FIRST
4995 and LAST. */
4997 void
4998 end_full_sequence (rtx *first, rtx *last)
5000 *first = first_insn;
5001 *last = last_insn;
5002 end_sequence ();
5005 /* Return 1 if currently emitting into a sequence. */
5007 int
5008 in_sequence_p (void)
5010 return seq_stack != 0;
5013 /* Put the various virtual registers into REGNO_REG_RTX. */
5015 void
5016 init_virtual_regs (struct emit_status *es)
5018 rtx *ptr = es->x_regno_reg_rtx;
5019 ptr[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
5020 ptr[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
5021 ptr[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
5022 ptr[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
5023 ptr[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
5027 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
5028 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
5029 static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
5030 static int copy_insn_n_scratches;
5032 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5033 copied an ASM_OPERANDS.
5034 In that case, it is the original input-operand vector. */
5035 static rtvec orig_asm_operands_vector;
5037 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5038 copied an ASM_OPERANDS.
5039 In that case, it is the copied input-operand vector. */
5040 static rtvec copy_asm_operands_vector;
5042 /* Likewise for the constraints vector. */
5043 static rtvec orig_asm_constraints_vector;
5044 static rtvec copy_asm_constraints_vector;
5046 /* Recursively create a new copy of an rtx for copy_insn.
5047 This function differs from copy_rtx in that it handles SCRATCHes and
5048 ASM_OPERANDs properly.
5049 Normally, this function is not used directly; use copy_insn as front end.
5050 However, you could first copy an insn pattern with copy_insn and then use
5051 this function afterwards to properly copy any REG_NOTEs containing
5052 SCRATCHes. */
5054 rtx
5055 copy_insn_1 (rtx orig)
5057 rtx copy;
5058 int i, j;
5059 RTX_CODE code;
5060 const char *format_ptr;
5062 code = GET_CODE (orig);
5064 switch (code)
5066 case REG:
5067 case QUEUED:
5068 case CONST_INT:
5069 case CONST_DOUBLE:
5070 case CONST_VECTOR:
5071 case SYMBOL_REF:
5072 case CODE_LABEL:
5073 case PC:
5074 case CC0:
5075 case ADDRESSOF:
5076 return orig;
5078 case SCRATCH:
5079 for (i = 0; i < copy_insn_n_scratches; i++)
5080 if (copy_insn_scratch_in[i] == orig)
5081 return copy_insn_scratch_out[i];
5082 break;
5084 case CONST:
5085 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
5086 a LABEL_REF, it isn't sharable. */
5087 if (GET_CODE (XEXP (orig, 0)) == PLUS
5088 && GET_CODE (XEXP (XEXP (orig, 0), 0)) == SYMBOL_REF
5089 && GET_CODE (XEXP (XEXP (orig, 0), 1)) == CONST_INT)
5090 return orig;
5091 break;
5093 /* A MEM with a constant address is not sharable. The problem is that
5094 the constant address may need to be reloaded. If the mem is shared,
5095 then reloading one copy of this mem will cause all copies to appear
5096 to have been reloaded. */
5098 default:
5099 break;
5102 copy = rtx_alloc (code);
5104 /* Copy the various flags, and other information. We assume that
5105 all fields need copying, and then clear the fields that should
5106 not be copied. That is the sensible default behavior, and forces
5107 us to explicitly document why we are *not* copying a flag. */
5108 memcpy (copy, orig, sizeof (struct rtx_def) - sizeof (rtunion));
5110 /* We do not copy the USED flag, which is used as a mark bit during
5111 walks over the RTL. */
5112 RTX_FLAG (copy, used) = 0;
5114 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
5115 if (GET_RTX_CLASS (code) == 'i')
5117 RTX_FLAG (copy, jump) = 0;
5118 RTX_FLAG (copy, call) = 0;
5119 RTX_FLAG (copy, frame_related) = 0;
5122 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
5124 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
5126 copy->fld[i] = orig->fld[i];
5127 switch (*format_ptr++)
5129 case 'e':
5130 if (XEXP (orig, i) != NULL)
5131 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
5132 break;
5134 case 'E':
5135 case 'V':
5136 if (XVEC (orig, i) == orig_asm_constraints_vector)
5137 XVEC (copy, i) = copy_asm_constraints_vector;
5138 else if (XVEC (orig, i) == orig_asm_operands_vector)
5139 XVEC (copy, i) = copy_asm_operands_vector;
5140 else if (XVEC (orig, i) != NULL)
5142 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
5143 for (j = 0; j < XVECLEN (copy, i); j++)
5144 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
5146 break;
5148 case 't':
5149 case 'w':
5150 case 'i':
5151 case 's':
5152 case 'S':
5153 case 'u':
5154 case '0':
5155 /* These are left unchanged. */
5156 break;
5158 default:
5159 abort ();
5163 if (code == SCRATCH)
5165 i = copy_insn_n_scratches++;
5166 if (i >= MAX_RECOG_OPERANDS)
5167 abort ();
5168 copy_insn_scratch_in[i] = orig;
5169 copy_insn_scratch_out[i] = copy;
5171 else if (code == ASM_OPERANDS)
5173 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
5174 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
5175 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
5176 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
5179 return copy;
5182 /* Create a new copy of an rtx.
5183 This function differs from copy_rtx in that it handles SCRATCHes and
5184 ASM_OPERANDs properly.
5185 INSN doesn't really have to be a full INSN; it could be just the
5186 pattern. */
5187 rtx
5188 copy_insn (rtx insn)
5190 copy_insn_n_scratches = 0;
5191 orig_asm_operands_vector = 0;
5192 orig_asm_constraints_vector = 0;
5193 copy_asm_operands_vector = 0;
5194 copy_asm_constraints_vector = 0;
5195 return copy_insn_1 (insn);
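/* Illustrative sketch (NEW_PAT and NEW_NOTES are hypothetical
   locals): duplicating an insn's pattern and then its notes, so
   that SCRATCHes stay matched between the two copies:

     new_pat = copy_insn (PATTERN (insn));
     new_notes = copy_insn_1 (REG_NOTES (insn));

   The copy_insn_1 call must come second so that it reuses the
   SCRATCH mapping set up by copy_insn, per the comment above.  */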
5198 /* Initialize data structures and variables in this file
5199 before generating rtl for each function. */
5201 void
5202 init_emit (void)
5204 struct function *f = cfun;
5206 f->emit = ggc_alloc (sizeof (struct emit_status));
5207 first_insn = NULL;
5208 last_insn = NULL;
5209 seq_rtl_expr = NULL;
5210 cur_insn_uid = 1;
5211 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
5212 last_location.line = 0;
5213 last_location.file = 0;
5214 first_label_num = label_num;
5215 last_label_num = 0;
5216 seq_stack = NULL;
5218 /* Init the tables that describe all the pseudo regs. */
5220 f->emit->regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
5222 f->emit->regno_pointer_align
5223 = ggc_alloc_cleared (f->emit->regno_pointer_align_length
5224 * sizeof (unsigned char));
5226 regno_reg_rtx
5227 = ggc_alloc (f->emit->regno_pointer_align_length * sizeof (rtx));
5229 /* Put copies of all the hard registers into regno_reg_rtx. */
5230 memcpy (regno_reg_rtx,
5231 static_regno_reg_rtx,
5232 FIRST_PSEUDO_REGISTER * sizeof (rtx));
5234 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
5235 init_virtual_regs (f->emit);
5237 /* Indicate that the virtual registers and stack locations are
5238 all pointers. */
5239 REG_POINTER (stack_pointer_rtx) = 1;
5240 REG_POINTER (frame_pointer_rtx) = 1;
5241 REG_POINTER (hard_frame_pointer_rtx) = 1;
5242 REG_POINTER (arg_pointer_rtx) = 1;
5244 REG_POINTER (virtual_incoming_args_rtx) = 1;
5245 REG_POINTER (virtual_stack_vars_rtx) = 1;
5246 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5247 REG_POINTER (virtual_outgoing_args_rtx) = 1;
5248 REG_POINTER (virtual_cfa_rtx) = 1;
5250 #ifdef STACK_BOUNDARY
5251 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5252 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5253 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5254 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5256 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5257 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5258 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5259 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5260 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
5261 #endif
5263 #ifdef INIT_EXPANDERS
5264 INIT_EXPANDERS;
5265 #endif
5268 /* Generate the constant vector of zeros for MODE. */
5270 static rtx
5271 gen_const_vector_0 (enum machine_mode mode)
5273 rtx tem;
5274 rtvec v;
5275 int units, i;
5276 enum machine_mode inner;
5278 units = GET_MODE_NUNITS (mode);
5279 inner = GET_MODE_INNER (mode);
5281 v = rtvec_alloc (units);
5283 /* CONST0_RTX (inner) must already be set when this function is called. */
5284 if (!CONST0_RTX (inner))
5285 abort ();
5287 for (i = 0; i < units; ++i)
5288 RTVEC_ELT (v, i) = CONST0_RTX (inner);
5290 tem = gen_rtx_raw_CONST_VECTOR (mode, v);
5291 return tem;
5294 /* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the zero vector when
5295 all elements are zero. */
5296 rtx
5297 gen_rtx_CONST_VECTOR (enum machine_mode mode, rtvec v)
5299 rtx inner_zero = CONST0_RTX (GET_MODE_INNER (mode));
5300 int i;
5302 for (i = GET_MODE_NUNITS (mode) - 1; i >= 0; i--)
5303 if (RTVEC_ELT (v, i) != inner_zero)
5304 return gen_rtx_raw_CONST_VECTOR (mode, v);
5305 return CONST0_RTX (mode);
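/* Illustrative use (assumes the target supports V4SImode): building
   a vector of four zeros this way yields the shared zero vector
   CONST0_RTX (V4SImode) rather than a fresh CONST_VECTOR:

     rtvec v = rtvec_alloc (4);
     for (i = 0; i < 4; i++)
       RTVEC_ELT (v, i) = const0_rtx;
     x = gen_rtx_CONST_VECTOR (V4SImode, v);  */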
5308 /* Create some permanent unique rtl objects shared between all functions.
5309 LINE_NUMBERS is nonzero if line numbers are to be generated. */
5311 void
5312 init_emit_once (int line_numbers)
5314 int i;
5315 enum machine_mode mode;
5316 enum machine_mode double_mode;
5318 /* We need reg_raw_mode, so initialize the modes now. */
5319 init_reg_modes_once ();
5321 /* Initialize the CONST_INT, CONST_DOUBLE, and memory attribute hash
5322 tables. */
5323 const_int_htab = htab_create_ggc (37, const_int_htab_hash,
5324 const_int_htab_eq, NULL);
5326 const_double_htab = htab_create_ggc (37, const_double_htab_hash,
5327 const_double_htab_eq, NULL);
5329 mem_attrs_htab = htab_create_ggc (37, mem_attrs_htab_hash,
5330 mem_attrs_htab_eq, NULL);
5331 reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash,
5332 reg_attrs_htab_eq, NULL);
5334 no_line_numbers = ! line_numbers;
5336 /* Compute the byte, word and double modes. */
5338 byte_mode = VOIDmode;
5339 word_mode = VOIDmode;
5340 double_mode = VOIDmode;
5342 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
5343 mode = GET_MODE_WIDER_MODE (mode))
5345 if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
5346 && byte_mode == VOIDmode)
5347 byte_mode = mode;
5349 if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
5350 && word_mode == VOIDmode)
5351 word_mode = mode;
5354 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
5355 mode = GET_MODE_WIDER_MODE (mode))
5357 if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
5358 && double_mode == VOIDmode)
5359 double_mode = mode;
5362 ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
5364 /* Assign register numbers to the globally defined register rtx.
5365 This must be done at runtime because the register number field
5366 is in a union and some compilers can't initialize unions. */
5368 pc_rtx = gen_rtx (PC, VOIDmode);
5369 cc0_rtx = gen_rtx (CC0, VOIDmode);
5370 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
5371 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
5372 if (hard_frame_pointer_rtx == 0)
5373 hard_frame_pointer_rtx = gen_raw_REG (Pmode,
5374 HARD_FRAME_POINTER_REGNUM);
5375 if (arg_pointer_rtx == 0)
5376 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
5377 virtual_incoming_args_rtx =
5378 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
5379 virtual_stack_vars_rtx =
5380 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
5381 virtual_stack_dynamic_rtx =
5382 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
5383 virtual_outgoing_args_rtx =
5384 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
5385 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
5387 /* Initialize RTL for commonly used hard registers. These are
5388 copied into regno_reg_rtx as we begin to compile each function. */
5389 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5390 static_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
5392 #ifdef INIT_EXPANDERS
5393 /* This is to initialize {init|mark|free}_machine_status before the first
5394 call to push_function_context_to. This is needed by the Chill front
5395 end which calls push_function_context_to before the first call to
5396 init_function_start. */
5397 INIT_EXPANDERS;
5398 #endif
5400 /* Create the unique rtx's for certain rtx codes and operand values. */
5402 /* Don't use gen_rtx here since gen_rtx in this case
5403 tries to use these variables. */
5404 for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
5405 const_int_rtx[i + MAX_SAVED_CONST_INT] =
5406 gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);
5408 if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
5409 && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
5410 const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
5411 else
5412 const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
5414 REAL_VALUE_FROM_INT (dconst0, 0, 0, double_mode);
5415 REAL_VALUE_FROM_INT (dconst1, 1, 0, double_mode);
5416 REAL_VALUE_FROM_INT (dconst2, 2, 0, double_mode);
5417 REAL_VALUE_FROM_INT (dconstm1, -1, -1, double_mode);
5418 REAL_VALUE_FROM_INT (dconstm2, -2, -1, double_mode);
5420 dconsthalf = dconst1;
5421 dconsthalf.exp--;
5423 for (i = 0; i <= 2; i++)
5425 REAL_VALUE_TYPE *r =
5426 (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
5428 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
5429 mode = GET_MODE_WIDER_MODE (mode))
5430 const_tiny_rtx[i][(int) mode] =
5431 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5433 const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
5435 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
5436 mode = GET_MODE_WIDER_MODE (mode))
5437 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5439 for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT);
5440 mode != VOIDmode;
5441 mode = GET_MODE_WIDER_MODE (mode))
5442 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5445 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
5446 mode != VOIDmode;
5447 mode = GET_MODE_WIDER_MODE (mode))
5448 const_tiny_rtx[0][(int) mode] = gen_const_vector_0 (mode);
5450 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
5451 mode != VOIDmode;
5452 mode = GET_MODE_WIDER_MODE (mode))
5453 const_tiny_rtx[0][(int) mode] = gen_const_vector_0 (mode);
5455 for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
5456 if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
5457 const_tiny_rtx[0][i] = const0_rtx;
5459 const_tiny_rtx[0][(int) BImode] = const0_rtx;
5460 if (STORE_FLAG_VALUE == 1)
5461 const_tiny_rtx[1][(int) BImode] = const1_rtx;
5463 #ifdef RETURN_ADDRESS_POINTER_REGNUM
5464 return_address_pointer_rtx
5465 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
5466 #endif
5468 #ifdef STATIC_CHAIN_REGNUM
5469 static_chain_rtx = gen_rtx_REG (Pmode, STATIC_CHAIN_REGNUM);
5471 #ifdef STATIC_CHAIN_INCOMING_REGNUM
5472 if (STATIC_CHAIN_INCOMING_REGNUM != STATIC_CHAIN_REGNUM)
5473 static_chain_incoming_rtx
5474 = gen_rtx_REG (Pmode, STATIC_CHAIN_INCOMING_REGNUM);
5475 else
5476 #endif
5477 static_chain_incoming_rtx = static_chain_rtx;
5478 #endif
5480 #ifdef STATIC_CHAIN
5481 static_chain_rtx = STATIC_CHAIN;
5483 #ifdef STATIC_CHAIN_INCOMING
5484 static_chain_incoming_rtx = STATIC_CHAIN_INCOMING;
5485 #else
5486 static_chain_incoming_rtx = static_chain_rtx;
5487 #endif
5488 #endif
5490 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
5491 pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
5494 /* Query and clear/restore no_line_numbers. This is used by the
5495 switch/case handling in stmt.c to give proper line numbers in
5496 warnings about unreachable code. */
5498 int
5499 force_line_numbers (void)
5501 int old = no_line_numbers;
5503 no_line_numbers = 0;
5504 if (old)
5505 force_next_line_note ();
5506 return old;
5509 void
5510 restore_line_number_status (int old_value)
5512 no_line_numbers = old_value;
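/* The intended pairing (illustrative sketch):

     int old = force_line_numbers ();
     ... issue warnings with correct line numbers ...
     restore_line_number_status (old);  */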
5515 /* Produce an exact duplicate of insn INSN after AFTER.
5516 Take care to update any libcall regions if present. */
5518 rtx
5519 emit_copy_of_insn_after (rtx insn, rtx after)
5521 rtx new;
5522 rtx note1, note2, link;
5524 switch (GET_CODE (insn))
5526 case INSN:
5527 new = emit_insn_after (copy_insn (PATTERN (insn)), after);
5528 break;
5530 case JUMP_INSN:
5531 new = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
5532 break;
5534 case CALL_INSN:
5535 new = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
5536 if (CALL_INSN_FUNCTION_USAGE (insn))
5537 CALL_INSN_FUNCTION_USAGE (new)
5538 = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
5539 SIBLING_CALL_P (new) = SIBLING_CALL_P (insn);
5540 CONST_OR_PURE_CALL_P (new) = CONST_OR_PURE_CALL_P (insn);
5541 break;
5543 default:
5544 abort ();
5547 /* Update LABEL_NUSES. */
5548 mark_jump_label (PATTERN (new), new, 0);
5550 INSN_LOCATOR (new) = INSN_LOCATOR (insn);
5552 /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
5553 make them. */
5554 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
5555 if (REG_NOTE_KIND (link) != REG_LABEL)
5557 if (GET_CODE (link) == EXPR_LIST)
5558 REG_NOTES (new)
5559 = copy_insn_1 (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link),
5560 XEXP (link, 0),
5561 REG_NOTES (new)));
5562 else
5563 REG_NOTES (new)
5564 = copy_insn_1 (gen_rtx_INSN_LIST (REG_NOTE_KIND (link),
5565 XEXP (link, 0),
5566 REG_NOTES (new)));
5569 /* Fix the libcall sequences. */
5570 if ((note1 = find_reg_note (new, REG_RETVAL, NULL_RTX)) != NULL)
5572 rtx p = new;
5573 while ((note2 = find_reg_note (p, REG_LIBCALL, NULL_RTX)) == NULL)
5574 p = PREV_INSN (p);
5575 XEXP (note1, 0) = p;
5576 XEXP (note2, 0) = new;
5578 INSN_CODE (new) = INSN_CODE (insn);
5579 return new;
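/* E.g. (sketch only): a pass duplicating INSN right after itself,
   as an unroller might:

     copy = emit_copy_of_insn_after (insn, insn);

   The copy gets unshared pattern rtl where sharing would be unsafe
   (see copy_insn), plus the original's notes, locator and code.  */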
5582 #include "gt-emit-rtl.h"