[official-gcc.git] / gcc / emit-rtl.c
/* Emit RTL for the GNU C-Compiler expander.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* Middle-to-low level generation of rtx code and insns.

   This file contains the functions `gen_rtx', `gen_reg_rtx'
   and `gen_label_rtx' that are the usual ways of creating rtl
   expressions for most purposes.

   It also has the functions for creating insns and linking
   them in the doubly-linked chain.

   The patterns of the insns are created by machine-dependent
   routines in insn-emit.c, which is generated automatically from
   the machine description.  These routines use `gen_rtx' to make
   the individual rtx's of the pattern; what is machine dependent
   is the kind of rtx's they make and what arguments they use.  */
38 #include "config.h"
39 #include "system.h"
40 #include "coretypes.h"
41 #include "tm.h"
42 #include "toplev.h"
43 #include "rtl.h"
44 #include "tree.h"
45 #include "tm_p.h"
46 #include "flags.h"
47 #include "function.h"
48 #include "expr.h"
49 #include "regs.h"
50 #include "hard-reg-set.h"
51 #include "hashtab.h"
52 #include "insn-config.h"
53 #include "recog.h"
54 #include "real.h"
55 #include "bitmap.h"
56 #include "basic-block.h"
57 #include "ggc.h"
58 #include "debug.h"
59 #include "langhooks.h"
/* Commonly used modes.  */

enum machine_mode byte_mode;	/* Mode whose width is BITS_PER_UNIT.  */
enum machine_mode word_mode;	/* Mode whose width is BITS_PER_WORD.  */
enum machine_mode double_mode;	/* Mode whose width is DOUBLE_TYPE_SIZE.  */
enum machine_mode ptr_mode;	/* Mode whose width is POINTER_SIZE.  */

/* This is *not* reset after each function.  It gives each CODE_LABEL
   in the entire compilation a unique label number.  */

static GTY(()) int label_num = 1;

/* Highest label number in current function.
   Zero means use the value of label_num instead.
   This is nonzero only when belatedly compiling an inline function.  */

static int last_label_num;

/* Value label_num had when set_new_first_and_last_label_number was called.
   If label_num has not changed since then, last_label_num is valid.  */

static int base_label_num;

/* Nonzero means do not generate NOTEs for source line numbers.  */

static int no_line_numbers;

/* Commonly used rtx's, so that we only need space for one copy.
   These are initialized once for the entire compilation.
   All of these are unique; no other rtx-object will be equal to any
   of these.  */

rtx global_rtl[GR_MAX];

/* Commonly used RTL for hard registers.  These objects are not necessarily
   unique, so we allocate them separately from global_rtl.  They are
   initialized once per compilation unit, then copied into regno_reg_rtx
   at the beginning of each function.  */

static GTY(()) rtx static_regno_reg_rtx[FIRST_PSEUDO_REGISTER];

/* We record floating-point CONST_DOUBLEs in each floating-point mode for
   the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
   record a copy of const[012]_rtx.  */

rtx const_tiny_rtx[3][(int) MAX_MACHINE_MODE];

rtx const_true_rtx;

REAL_VALUE_TYPE dconst0;
REAL_VALUE_TYPE dconst1;
REAL_VALUE_TYPE dconst2;
REAL_VALUE_TYPE dconstm1;
REAL_VALUE_TYPE dconstm2;
REAL_VALUE_TYPE dconsthalf;
/* All references to the following fixed hard registers go through
   these unique rtl objects.  On machines where the frame-pointer and
   arg-pointer are the same register, they use the same unique object.

   After register allocation, other rtl objects which used to be pseudo-regs
   may be clobbered to refer to the frame-pointer register.
   But references that were originally to the frame-pointer can be
   distinguished from the others because they contain frame_pointer_rtx.

   When to use frame_pointer_rtx and hard_frame_pointer_rtx is a little
   tricky: until register elimination has taken place hard_frame_pointer_rtx
   should be used if it is being set, and frame_pointer_rtx otherwise.  After
   register elimination hard_frame_pointer_rtx should always be used.
   On machines where the two registers are the same (as on most machines),
   these are the same object.

   In an inline procedure, the stack and frame pointer rtxs may not be
   used for anything else.  */
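/* A sketch of the rule above, for illustration only (not from the
   original source).  Code that assigns to the frame pointer before
   elimination would be written along the lines of

	emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);

   while an ordinary pre-elimination frame reference would use

	gen_rtx_MEM (Pmode, plus_constant (frame_pointer_rtx, offset));

   assuming the usual expand-time helpers; after elimination only
   hard_frame_pointer_rtx remains in use.  */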
rtx struct_value_rtx;		/* (REG:Pmode STRUCT_VALUE_REGNUM) */
rtx struct_value_incoming_rtx;	/* (REG:Pmode STRUCT_VALUE_INCOMING_REGNUM) */
rtx static_chain_rtx;		/* (REG:Pmode STATIC_CHAIN_REGNUM) */
rtx static_chain_incoming_rtx;	/* (REG:Pmode STATIC_CHAIN_INCOMING_REGNUM) */
rtx pic_offset_table_rtx;	/* (REG:Pmode PIC_OFFSET_TABLE_REGNUM) */

/* This is used to implement __builtin_return_address for some machines.
   See for instance the MIPS port.  */
rtx return_address_pointer_rtx;	/* (REG:Pmode RETURN_ADDRESS_POINTER_REGNUM) */

/* We make one copy of (const_int C) where C is in
   [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
   to save space during the compilation and simplify comparisons of
   integers.  */

rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
/* A hash table storing CONST_INTs whose absolute value is greater
   than MAX_SAVED_CONST_INT.  */

static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_int_htab;

/* A hash table storing memory attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct mem_attrs)))
     htab_t mem_attrs_htab;

/* A hash table storing register attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs)))
     htab_t reg_attrs_htab;

/* A hash table storing all CONST_DOUBLEs.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_double_htab;

#define first_insn (cfun->emit->x_first_insn)
#define last_insn (cfun->emit->x_last_insn)
#define cur_insn_uid (cfun->emit->x_cur_insn_uid)
#define last_linenum (cfun->emit->x_last_linenum)
#define last_filename (cfun->emit->x_last_filename)
#define first_label_num (cfun->emit->x_first_label_num)
static rtx make_jump_insn_raw		PARAMS ((rtx));
static rtx make_call_insn_raw		PARAMS ((rtx));
static rtx find_line_note		PARAMS ((rtx));
static rtx change_address_1		PARAMS ((rtx, enum machine_mode, rtx,
						 int));
static void unshare_all_rtl_1		PARAMS ((rtx));
static void unshare_all_decls		PARAMS ((tree));
static void reset_used_decls		PARAMS ((tree));
static void mark_label_nuses		PARAMS ((rtx));
static hashval_t const_int_htab_hash	PARAMS ((const void *));
static int const_int_htab_eq		PARAMS ((const void *,
						 const void *));
static hashval_t const_double_htab_hash	PARAMS ((const void *));
static int const_double_htab_eq		PARAMS ((const void *,
						 const void *));
static rtx lookup_const_double		PARAMS ((rtx));
static hashval_t mem_attrs_htab_hash	PARAMS ((const void *));
static int mem_attrs_htab_eq		PARAMS ((const void *,
						 const void *));
static mem_attrs *get_mem_attrs		PARAMS ((HOST_WIDE_INT, tree, rtx,
						 rtx, unsigned int,
						 enum machine_mode));
static hashval_t reg_attrs_htab_hash	PARAMS ((const void *));
static int reg_attrs_htab_eq		PARAMS ((const void *,
						 const void *));
static reg_attrs *get_reg_attrs		PARAMS ((tree, int));
static tree component_ref_for_mem_expr	PARAMS ((tree));
static rtx gen_const_vector_0		PARAMS ((enum machine_mode));
/* Probability of the conditional branch currently processed by try_split.
   Set to -1 otherwise.  */
int split_branch_probability = -1;
/* Returns a hash code for X (which is really a CONST_INT).  */

static hashval_t
const_int_htab_hash (x)
     const void *x;
{
  return (hashval_t) INTVAL ((struct rtx_def *) x);
}
/* Returns nonzero if the value represented by X (which is really a
   CONST_INT) is the same as that given by Y (which is really a
   HOST_WIDE_INT *).  */

static int
const_int_htab_eq (x, y)
     const void *x;
     const void *y;
{
  return (INTVAL ((rtx) x) == *((const HOST_WIDE_INT *) y));
}
/* Returns a hash code for X (which is really a CONST_DOUBLE).  */
static hashval_t
const_double_htab_hash (x)
     const void *x;
{
  rtx value = (rtx) x;
  hashval_t h;

  if (GET_MODE (value) == VOIDmode)
    h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
  else
    {
      h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
      /* MODE is used in the comparison, so it should be in the hash.  */
      h ^= GET_MODE (value);
    }
  return h;
}
/* Returns nonzero if the value represented by X (really a ...)
   is the same as that represented by Y (really a ...) */
static int
const_double_htab_eq (x, y)
     const void *x;
     const void *y;
{
  rtx a = (rtx) x, b = (rtx) y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  if (GET_MODE (a) == VOIDmode)
    return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
	    && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
  else
    return real_identical (CONST_DOUBLE_REAL_VALUE (a),
			   CONST_DOUBLE_REAL_VALUE (b));
}
/* Returns a hash code for X (which is really a mem_attrs *).  */

static hashval_t
mem_attrs_htab_hash (x)
     const void *x;
{
  mem_attrs *p = (mem_attrs *) x;

  return (p->alias ^ (p->align * 1000)
	  ^ ((p->offset ? INTVAL (p->offset) : 0) * 50000)
	  ^ ((p->size ? INTVAL (p->size) : 0) * 2500000)
	  ^ (size_t) p->expr);
}
/* Returns nonzero if the value represented by X (which is really a
   mem_attrs *) is the same as that given by Y (which is also really a
   mem_attrs *).  */

static int
mem_attrs_htab_eq (x, y)
     const void *x;
     const void *y;
{
  mem_attrs *p = (mem_attrs *) x;
  mem_attrs *q = (mem_attrs *) y;

  return (p->alias == q->alias && p->expr == q->expr && p->offset == q->offset
	  && p->size == q->size && p->align == q->align);
}
/* Allocate a new mem_attrs structure and insert it into the hash table if
   one identical to it is not already in the table.  We are doing this for
   MEM of mode MODE.  */

static mem_attrs *
get_mem_attrs (alias, expr, offset, size, align, mode)
     HOST_WIDE_INT alias;
     tree expr;
     rtx offset;
     rtx size;
     unsigned int align;
     enum machine_mode mode;
{
  mem_attrs attrs;
  void **slot;

  /* If everything is the default, we can just return zero.  */
  if (alias == 0 && expr == 0 && offset == 0
      && (size == 0
	  || (mode != BLKmode && GET_MODE_SIZE (mode) == INTVAL (size)))
      && (align == BITS_PER_UNIT
	  || (STRICT_ALIGNMENT
	      && mode != BLKmode && align == GET_MODE_ALIGNMENT (mode))))
    return 0;

  attrs.alias = alias;
  attrs.expr = expr;
  attrs.offset = offset;
  attrs.size = size;
  attrs.align = align;

  slot = htab_find_slot (mem_attrs_htab, &attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc (sizeof (mem_attrs));
      memcpy (*slot, &attrs, sizeof (mem_attrs));
    }

  return *slot;
}
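/* Illustrative note (not in the original source): because get_mem_attrs
   hash-conses its results, two MEMs with identical attributes share one
   mem_attrs record, so attribute equality can be tested by pointer
   comparison, e.g.

	if (MEM_ATTRS (mem1) == MEM_ATTRS (mem2))
	  ... the two MEMs have the same alias set, expr, offset,
	      size and alignment ...  */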
/* Returns a hash code for X (which is really a reg_attrs *).  */

static hashval_t
reg_attrs_htab_hash (x)
     const void *x;
{
  reg_attrs *p = (reg_attrs *) x;

  return ((p->offset * 1000) ^ (long) p->decl);
}
/* Returns nonzero if the value represented by X (which is really a
   reg_attrs *) is the same as that given by Y (which is also really a
   reg_attrs *).  */

static int
reg_attrs_htab_eq (x, y)
     const void *x;
     const void *y;
{
  reg_attrs *p = (reg_attrs *) x;
  reg_attrs *q = (reg_attrs *) y;

  return (p->decl == q->decl && p->offset == q->offset);
}
/* Allocate a new reg_attrs structure and insert it into the hash table if
   one identical to it is not already in the table.  We are doing this for
   a REG whose decl is DECL and whose offset is OFFSET.  */

static reg_attrs *
get_reg_attrs (decl, offset)
     tree decl;
     int offset;
{
  reg_attrs attrs;
  void **slot;

  /* If everything is the default, we can just return zero.  */
  if (decl == 0 && offset == 0)
    return 0;

  attrs.decl = decl;
  attrs.offset = offset;

  slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc (sizeof (reg_attrs));
      memcpy (*slot, &attrs, sizeof (reg_attrs));
    }

  return *slot;
}
/* Generate a new REG rtx.  Make sure ORIGINAL_REGNO is set properly, and
   don't attempt to share with the various global pieces of rtl (such as
   frame_pointer_rtx).  */

rtx
gen_raw_REG (mode, regno)
     enum machine_mode mode;
     int regno;
{
  rtx x = gen_rtx_raw_REG (mode, regno);
  ORIGINAL_REGNO (x) = regno;
  return x;
}
/* There are some RTL codes that require special attention; the generation
   functions do the raw handling.  If you add to this list, modify
   special_rtx in gengenrtl.c as well.  */

rtx
gen_rtx_CONST_INT (mode, arg)
     enum machine_mode mode ATTRIBUTE_UNUSED;
     HOST_WIDE_INT arg;
{
  void **slot;

  if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
    return const_int_rtx[arg + MAX_SAVED_CONST_INT];

#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
  if (const_true_rtx && arg == STORE_FLAG_VALUE)
    return const_true_rtx;
#endif

  /* Look up the CONST_INT in the hash table.  */
  slot = htab_find_slot_with_hash (const_int_htab, &arg,
				   (hashval_t) arg, INSERT);
  if (*slot == 0)
    *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);

  return (rtx) *slot;
}
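/* Illustrative consequence (not in the original source): CONST_INTs are
   fully shared, both the small cached values and the hashed ones, so
   pointer equality is a correct value comparison:

	GEN_INT (i) == GEN_INT (j)   if and only if   i == j

   and small constants come straight from the cache, e.g.
   GEN_INT (0) == const0_rtx.  */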
rtx
gen_int_mode (c, mode)
     HOST_WIDE_INT c;
     enum machine_mode mode;
{
  return GEN_INT (trunc_int_for_mode (c, mode));
}
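/* A worked example (not in the original source): gen_int_mode first
   truncates and sign-extends C to MODE, so with a 64-bit HOST_WIDE_INT

	gen_int_mode (0xff, QImode)   yields (const_int -1)
	gen_int_mode (0x7f, QImode)   yields (const_int 127)

   whereas a bare GEN_INT (0xff) would produce (const_int 255), which is
   not a canonical QImode value.  */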
/* CONST_DOUBLEs might be created from pairs of integers, or from
   REAL_VALUE_TYPEs.  Also, their length is known only at run time,
   so we cannot use gen_rtx_raw_CONST_DOUBLE.  */

/* Determine whether REAL, a CONST_DOUBLE, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */
static rtx
lookup_const_double (real)
     rtx real;
{
  void **slot = htab_find_slot (const_double_htab, real, INSERT);
  if (*slot == 0)
    *slot = real;

  return (rtx) *slot;
}

/* Return a CONST_DOUBLE rtx for a floating-point value specified by
   VALUE in mode MODE.  */

rtx
const_double_from_real_value (value, mode)
     REAL_VALUE_TYPE value;
     enum machine_mode mode;
{
  rtx real = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (real, mode);

  memcpy (&CONST_DOUBLE_LOW (real), &value, sizeof (REAL_VALUE_TYPE));

  return lookup_const_double (real);
}
/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
   of ints: I0 is the low-order word and I1 is the high-order word.
   Do not use this routine for non-integer modes; convert to
   REAL_VALUE_TYPE and use CONST_DOUBLE_FROM_REAL_VALUE.  */

rtx
immed_double_const (i0, i1, mode)
     HOST_WIDE_INT i0, i1;
     enum machine_mode mode;
{
  rtx value;
  unsigned int i;

  if (mode != VOIDmode)
    {
      int width;
      if (GET_MODE_CLASS (mode) != MODE_INT
	  && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT
	  /* We can get a 0 for an error mark.  */
	  && GET_MODE_CLASS (mode) != MODE_VECTOR_INT
	  && GET_MODE_CLASS (mode) != MODE_VECTOR_FLOAT)
	abort ();

      /* We clear out all bits that don't belong in MODE, unless they and
	 our sign bit are all one.  So we get either a reasonable negative
	 value or a reasonable unsigned value for this mode.  */
      width = GET_MODE_BITSIZE (mode);
      if (width < HOST_BITS_PER_WIDE_INT
	  && ((i0 & ((HOST_WIDE_INT) (-1) << (width - 1)))
	      != ((HOST_WIDE_INT) (-1) << (width - 1))))
	i0 &= ((HOST_WIDE_INT) 1 << width) - 1, i1 = 0;
      else if (width == HOST_BITS_PER_WIDE_INT
	       && ! (i1 == ~0 && i0 < 0))
	i1 = 0;
      else if (width > 2 * HOST_BITS_PER_WIDE_INT)
	/* We cannot represent this value as a constant.  */
	abort ();

      /* If this would be an entire word for the target, but is not for
	 the host, then sign-extend on the host so that the number will
	 look the same way on the host that it would on the target.

	 For example, when building a 64 bit alpha hosted 32 bit sparc
	 targeted compiler, then we want the 32 bit unsigned value -1 to be
	 represented as a 64 bit value -1, and not as 0x00000000ffffffff.
	 The latter confuses the sparc backend.  */

      if (width < HOST_BITS_PER_WIDE_INT
	  && (i0 & ((HOST_WIDE_INT) 1 << (width - 1))))
	i0 |= ((HOST_WIDE_INT) (-1) << width);

      /* If MODE fits within HOST_BITS_PER_WIDE_INT, always use a
	 CONST_INT.

	 ??? Strictly speaking, this is wrong if we create a CONST_INT for
	 a large unsigned constant with the size of MODE being
	 HOST_BITS_PER_WIDE_INT and later try to interpret that constant
	 in a wider mode.  In that case we will mis-interpret it as a
	 negative number.

	 Unfortunately, the only alternative is to make a CONST_DOUBLE for
	 any constant in any mode if it is an unsigned constant larger
	 than the maximum signed integer in an int on the host.  However,
	 doing this will break everyone that always expects to see a
	 CONST_INT for SImode and smaller.

	 We have always been making CONST_INTs in this case, so nothing
	 new is being broken.  */

      if (width <= HOST_BITS_PER_WIDE_INT)
	i1 = (i0 < 0) ? ~(HOST_WIDE_INT) 0 : 0;
    }

  /* If this integer fits in one word, return a CONST_INT.  */
  if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
    return GEN_INT (i0);

  /* We use VOIDmode for integers.  */
  value = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (value, VOIDmode);

  CONST_DOUBLE_LOW (value) = i0;
  CONST_DOUBLE_HIGH (value) = i1;

  for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
    XWINT (value, i) = 0;

  return lookup_const_double (value);
}
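/* A worked example (not in the original source), assuming a host with a
   32-bit HOST_WIDE_INT and a target with 64-bit DImode:

	immed_double_const (0x90abcdef, 0x12345678, DImode)

   returns a VOIDmode CONST_DOUBLE whose CONST_DOUBLE_LOW is 0x90abcdef
   and whose CONST_DOUBLE_HIGH is 0x12345678, while

	immed_double_const (5, 0, DImode)

   fits in one word and comes back as (const_int 5).  */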
rtx
gen_rtx_REG (mode, regno)
     enum machine_mode mode;
     unsigned int regno;
{
  /* In case the MD file explicitly references the frame pointer, have
     all such references point to the same frame pointer.  This is
     used during frame pointer elimination to distinguish the explicit
     references to these registers from pseudos that happened to be
     assigned to them.

     If we have eliminated the frame pointer or arg pointer, we will
     be using it as a normal register, for example as a spill
     register.  In such cases, we might be accessing it in a mode that
     is not Pmode and therefore cannot use the pre-allocated rtx.

     Also don't do this when we are making new REGs in reload, since
     we don't want to get confused with the real pointers.  */

  if (mode == Pmode && !reload_in_progress)
    {
      if (regno == FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return frame_pointer_rtx;
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
      if (regno == HARD_FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return hard_frame_pointer_rtx;
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && HARD_FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
      if (regno == ARG_POINTER_REGNUM)
	return arg_pointer_rtx;
#endif
#ifdef RETURN_ADDRESS_POINTER_REGNUM
      if (regno == RETURN_ADDRESS_POINTER_REGNUM)
	return return_address_pointer_rtx;
#endif
      if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	return pic_offset_table_rtx;
      if (regno == STACK_POINTER_REGNUM)
	return stack_pointer_rtx;
    }

#if 0
  /* If the per-function register table has been set up, try to re-use
     an existing entry in that table to avoid useless generation of RTL.

     This code is disabled for now until we can fix the various backends
     which depend on having non-shared hard registers in some cases.  Long
     term we want to re-enable this code as it can significantly cut down
     on the amount of useless RTL that gets generated.

     We'll also need to fix some code that runs after reload that wants to
     set ORIGINAL_REGNO.  */

  if (cfun
      && cfun->emit
      && regno_reg_rtx
      && regno < FIRST_PSEUDO_REGISTER
      && reg_raw_mode[regno] == mode)
    return regno_reg_rtx[regno];
#endif

  return gen_raw_REG (mode, regno);
}
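/* Illustrative consequence (not in the original source): asking for the
   frame pointer in Pmode outside of reload hands back the shared object,

	gen_rtx_REG (Pmode, FRAME_POINTER_REGNUM) == frame_pointer_rtx

   so passes may compare such references with ==; a request in any other
   mode produces a fresh, unshared REG instead.  */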
rtx
gen_rtx_MEM (mode, addr)
     enum machine_mode mode;
     rtx addr;
{
  rtx rt = gen_rtx_raw_MEM (mode, addr);

  /* This field is not cleared by the mere allocation of the rtx, so
     we clear it here.  */
  MEM_ATTRS (rt) = 0;

  return rt;
}

rtx
gen_rtx_SUBREG (mode, reg, offset)
     enum machine_mode mode;
     rtx reg;
     int offset;
{
  /* This is the most common failure type.
     Catch it early so we can see who does it.  */
  if ((offset % GET_MODE_SIZE (mode)) != 0)
    abort ();

  /* This check isn't usable right now because combine will
     throw arbitrary crap like a CALL into a SUBREG in
     gen_lowpart_for_combine so we must just eat it.  */
#if 0
  /* Check for this too.  */
  if (offset >= GET_MODE_SIZE (GET_MODE (reg)))
    abort ();
#endif
  return gen_rtx_raw_SUBREG (mode, reg, offset);
}
/* Generate a SUBREG representing the least-significant part of REG if MODE
   is smaller than the mode of REG; otherwise generate a paradoxical SUBREG.  */

rtx
gen_lowpart_SUBREG (mode, reg)
     enum machine_mode mode;
     rtx reg;
{
  enum machine_mode inmode;

  inmode = GET_MODE (reg);
  if (inmode == VOIDmode)
    inmode = mode;
  return gen_rtx_SUBREG (mode, reg,
			 subreg_lowpart_offset (mode, inmode));
}
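/* A sketch (not in the original source): on a 32-bit target,

	gen_lowpart_SUBREG (QImode, gen_rtx_REG (SImode, n))

   yields (subreg:QI (reg:SI n) 0) on a little-endian machine and
   (subreg:QI (reg:SI n) 3) on a big-endian one, per
   subreg_lowpart_offset below.  */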
/* rtx gen_rtx (code, mode, [element1, ..., elementn])
**
**	    This routine generates an RTX of the size specified by
**	<code>, which is an RTX code.  The RTX structure is initialized
**	from the arguments <element1> through <elementn>, which are
**	interpreted according to the specific RTX type's format.  The
**	special machine mode associated with the rtx (if any) is specified
**	in <mode>.
**
**	    gen_rtx can be invoked in a way which resembles the lisp-like
**	rtx it will generate.  For example, the following rtx structure:
**
**	      (plus:QI (mem:QI (reg:SI 1))
**		       (mem:QI (plus:SI (reg:SI 2) (reg:SI 3))))
**
**	    ...would be generated by the following C code:
**
**	    gen_rtx (PLUS, QImode,
**		    gen_rtx (MEM, QImode,
**			    gen_rtx (REG, SImode, 1)),
**		    gen_rtx (MEM, QImode,
**			    gen_rtx (PLUS, SImode,
**				    gen_rtx (REG, SImode, 2),
**				    gen_rtx (REG, SImode, 3)))),
*/
/*VARARGS2*/
rtx
gen_rtx VPARAMS ((enum rtx_code code, enum machine_mode mode, ...))
{
  int i;		/* Array indices...			*/
  const char *fmt;	/* Current rtx's format...		*/
  rtx rt_val;		/* RTX to return to caller...		*/

  VA_OPEN (p, mode);
  VA_FIXEDARG (p, enum rtx_code, code);
  VA_FIXEDARG (p, enum machine_mode, mode);

  switch (code)
    {
    case CONST_INT:
      rt_val = gen_rtx_CONST_INT (mode, va_arg (p, HOST_WIDE_INT));
      break;

    case CONST_DOUBLE:
      {
	HOST_WIDE_INT arg0 = va_arg (p, HOST_WIDE_INT);
	HOST_WIDE_INT arg1 = va_arg (p, HOST_WIDE_INT);

	rt_val = immed_double_const (arg0, arg1, mode);
      }
      break;

    case REG:
      rt_val = gen_rtx_REG (mode, va_arg (p, int));
      break;

    case MEM:
      rt_val = gen_rtx_MEM (mode, va_arg (p, rtx));
      break;

    default:
      rt_val = rtx_alloc (code);	/* Allocate the storage space.  */
      rt_val->mode = mode;		/* Store the machine mode...  */

      fmt = GET_RTX_FORMAT (code);	/* Find the right format...  */
      for (i = 0; i < GET_RTX_LENGTH (code); i++)
	{
	  switch (*fmt++)
	    {
	    case '0':		/* Unused field.  */
	      break;

	    case 'i':		/* An integer?  */
	      XINT (rt_val, i) = va_arg (p, int);
	      break;

	    case 'w':		/* A wide integer?  */
	      XWINT (rt_val, i) = va_arg (p, HOST_WIDE_INT);
	      break;

	    case 's':		/* A string?  */
	      XSTR (rt_val, i) = va_arg (p, char *);
	      break;

	    case 'e':		/* An expression?  */
	    case 'u':		/* An insn?  Same except when printing.  */
	      XEXP (rt_val, i) = va_arg (p, rtx);
	      break;

	    case 'E':		/* An RTX vector?  */
	      XVEC (rt_val, i) = va_arg (p, rtvec);
	      break;

	    case 'b':		/* A bitmap?  */
	      XBITMAP (rt_val, i) = va_arg (p, bitmap);
	      break;

	    case 't':		/* A tree?  */
	      XTREE (rt_val, i) = va_arg (p, tree);
	      break;

	    default:
	      abort ();
	    }
	}
      break;
    }

  VA_CLOSE (p);
  return rt_val;
}
/* gen_rtvec (n, [rt1, ..., rtn])
**
**	    This routine creates an rtvec and stores within it the
**	pointers to rtx's which are its arguments.
*/

/*VARARGS1*/
rtvec
gen_rtvec VPARAMS ((int n, ...))
{
  int i, save_n;
  rtx *vector;

  VA_OPEN (p, n);
  VA_FIXEDARG (p, int, n);

  if (n == 0)
    return NULL_RTVEC;		/* Don't allocate an empty rtvec...	*/

  vector = (rtx *) alloca (n * sizeof (rtx));

  for (i = 0; i < n; i++)
    vector[i] = va_arg (p, rtx);

  /* The definition of VA_* in K&R C causes `n' to go out of scope.  */
  save_n = n;
  VA_CLOSE (p);

  return gen_rtvec_v (save_n, vector);
}

rtvec
gen_rtvec_v (n, argp)
     int n;
     rtx *argp;
{
  int i;
  rtvec rt_val;

  if (n == 0)
    return NULL_RTVEC;		/* Don't allocate an empty rtvec...	*/

  rt_val = rtvec_alloc (n);	/* Allocate an rtvec...			*/

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}
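/* Usage sketch (not in the original source): a two-element vector such
   as the body of a PARALLEL can be built with either entry point,

	rtvec v = gen_rtvec (2, set0, set1);
	rtx par = gen_rtx_PARALLEL (VOIDmode, v);

   where set0 and set1 are previously constructed SET rtxs; gen_rtvec_v
   does the same starting from an existing array of rtx.  */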
/* Generate a REG rtx for a new pseudo register of mode MODE.
   This pseudo is assigned the next sequential register number.  */

rtx
gen_reg_rtx (mode)
     enum machine_mode mode;
{
  struct function *f = cfun;
  rtx val;

  /* Don't let anything called after initial flow analysis create new
     registers.  */
  if (no_new_pseudos)
    abort ();

  if (generating_concat_p
      && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
    {
      /* For complex modes, don't make a single pseudo.
	 Instead, make a CONCAT of two pseudos.
	 This allows noncontiguous allocation of the real and imaginary parts,
	 which makes much better code.  Besides, allocating DCmode
	 pseudos overstrains reload on some machines like the 386.  */
      rtx realpart, imagpart;
      enum machine_mode partmode = GET_MODE_INNER (mode);

      realpart = gen_reg_rtx (partmode);
      imagpart = gen_reg_rtx (partmode);
      return gen_rtx_CONCAT (mode, realpart, imagpart);
    }

  /* Make sure regno_pointer_align, and regno_reg_rtx are large
     enough to have an element for this pseudo reg number.  */

  if (reg_rtx_no == f->emit->regno_pointer_align_length)
    {
      int old_size = f->emit->regno_pointer_align_length;
      char *new;
      rtx *new1;

      new = ggc_realloc (f->emit->regno_pointer_align, old_size * 2);
      memset (new + old_size, 0, old_size);
      f->emit->regno_pointer_align = (unsigned char *) new;

      new1 = (rtx *) ggc_realloc (f->emit->x_regno_reg_rtx,
				  old_size * 2 * sizeof (rtx));
      memset (new1 + old_size, 0, old_size * sizeof (rtx));
      regno_reg_rtx = new1;

      f->emit->regno_pointer_align_length = old_size * 2;
    }

  val = gen_raw_REG (mode, reg_rtx_no);
  regno_reg_rtx[reg_rtx_no++] = val;
  return val;
}
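/* Usage sketch (not in the original source): expanders typically grab a
   scratch pseudo like so,

	rtx tmp = gen_reg_rtx (SImode);
	emit_move_insn (tmp, src);

   where src is some previously built rtx; for a complex mode such as
   DCmode the call instead returns a CONCAT of two consecutively
   numbered DFmode pseudos, as described above.  */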
/* Generate a register with the same attributes as REG, but whose
   offset is increased by OFFSET.  */

rtx
gen_rtx_REG_offset (reg, mode, regno, offset)
     enum machine_mode mode;
     unsigned int regno;
     int offset;
     rtx reg;
{
  rtx new = gen_rtx_REG (mode, regno);
  REG_ATTRS (new) = get_reg_attrs (REG_EXPR (reg),
				   REG_OFFSET (reg) + offset);
  return new;
}
/* Set the register attributes for REG from the memory attributes of MEM.  */

void
set_reg_attrs_from_mem (reg, mem)
     rtx reg;
     rtx mem;
{
  if (MEM_OFFSET (mem) && GET_CODE (MEM_OFFSET (mem)) == CONST_INT)
    REG_ATTRS (reg)
      = get_reg_attrs (MEM_EXPR (mem), INTVAL (MEM_OFFSET (mem)));
}
/* Set the register attributes for registers contained in PARM_RTX.
   Use needed values from memory attributes of MEM.  */

void
set_reg_attrs_for_parm (parm_rtx, mem)
     rtx parm_rtx;
     rtx mem;
{
  if (GET_CODE (parm_rtx) == REG)
    set_reg_attrs_from_mem (parm_rtx, mem);
  else if (GET_CODE (parm_rtx) == PARALLEL)
    {
      /* Check for a NULL entry in the first slot, used to indicate that the
	 parameter goes both on the stack and in registers.  */
      int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
      for (; i < XVECLEN (parm_rtx, 0); i++)
	{
	  rtx x = XVECEXP (parm_rtx, 0, i);
	  if (GET_CODE (XEXP (x, 0)) == REG)
	    REG_ATTRS (XEXP (x, 0))
	      = get_reg_attrs (MEM_EXPR (mem),
			       INTVAL (XEXP (x, 1)));
	}
    }
}
/* Assign the RTX X to declaration T.  */
void
set_decl_rtl (t, x)
     tree t;
     rtx x;
{
  DECL_CHECK (t)->decl.rtl = x;

  if (!x)
    return;
  /* For registers, we maintain the reverse information too.  */
  if (GET_CODE (x) == REG)
    REG_ATTRS (x) = get_reg_attrs (t, 0);
  else if (GET_CODE (x) == SUBREG)
    REG_ATTRS (SUBREG_REG (x))
      = get_reg_attrs (t, -SUBREG_BYTE (x));
  if (GET_CODE (x) == CONCAT)
    {
      if (REG_P (XEXP (x, 0)))
	REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
      if (REG_P (XEXP (x, 1)))
	REG_ATTRS (XEXP (x, 1))
	  = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int i;
      for (i = 0; i < XVECLEN (x, 0); i++)
	{
	  rtx y = XVECEXP (x, 0, i);
	  if (REG_P (XEXP (y, 0)))
	    REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
	}
    }
}
/* Identify REG (which may be a CONCAT) as a user register.  */

void
mark_user_reg (reg)
     rtx reg;
{
  if (GET_CODE (reg) == CONCAT)
    {
      REG_USERVAR_P (XEXP (reg, 0)) = 1;
      REG_USERVAR_P (XEXP (reg, 1)) = 1;
    }
  else if (GET_CODE (reg) == REG)
    REG_USERVAR_P (reg) = 1;
  else
    abort ();
}
/* Identify REG as a probable pointer register and show its alignment
   as ALIGN, if nonzero.  */

void
mark_reg_pointer (reg, align)
     rtx reg;
     int align;
{
  if (! REG_POINTER (reg))
    {
      REG_POINTER (reg) = 1;

      if (align)
	REGNO_POINTER_ALIGN (REGNO (reg)) = align;
    }
  else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
    /* We can no longer be sure just how aligned this pointer is.  */
    REGNO_POINTER_ALIGN (REGNO (reg)) = align;
}
/* Return 1 plus largest pseudo reg number used in the current function.  */

int
max_reg_num ()
{
  return reg_rtx_no;
}

/* Return 1 + the largest label number used so far in the current function.  */

int
max_label_num ()
{
  if (last_label_num && label_num == base_label_num)
    return last_label_num;
  return label_num;
}

/* Return first label number used in this function (if any were used).  */

int
get_first_label_num ()
{
  return first_label_num;
}
/* Return the final regno of X, which is a SUBREG of a hard
   register.  */

unsigned int
subreg_hard_regno (x, check_mode)
     rtx x;
     int check_mode;
{
  enum machine_mode mode = GET_MODE (x);
  unsigned int byte_offset, base_regno, final_regno;
  rtx reg = SUBREG_REG (x);

  /* This is where we attempt to catch illegal subregs
     created by the compiler.  */
  if (GET_CODE (x) != SUBREG
      || GET_CODE (reg) != REG)
    abort ();
  base_regno = REGNO (reg);
  if (base_regno >= FIRST_PSEUDO_REGISTER)
    abort ();
  if (check_mode && ! HARD_REGNO_MODE_OK (base_regno, GET_MODE (reg)))
    abort ();

  /* Catch non-congruent offsets too.  */
  byte_offset = SUBREG_BYTE (x);
  if ((byte_offset % GET_MODE_SIZE (mode)) != 0)
    abort ();

  final_regno = subreg_regno (x);

  return final_regno;
}
/* Return a value representing some low-order bits of X, where the number
   of low-order bits is given by MODE.  Note that no conversion is done
   between floating-point and fixed-point values, rather, the bit
   representation is returned.

   This function handles the cases in common between gen_lowpart, below,
   and two variants in cse.c and combine.c.  These are the cases that can
   be safely handled at all points in the compilation.

   If this is not a case we can handle, return 0.  */

rtx
gen_lowpart_common (mode, x)
     enum machine_mode mode;
     rtx x;
{
  int msize = GET_MODE_SIZE (mode);
  int xsize = GET_MODE_SIZE (GET_MODE (x));
  int offset = 0;

  if (GET_MODE (x) == mode)
    return x;

  /* MODE must occupy no more words than the mode of X.  */
  if (GET_MODE (x) != VOIDmode
      && ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
	  > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
    return 0;

  /* Don't allow generating paradoxical FLOAT_MODE subregs.  */
  if (GET_MODE_CLASS (mode) == MODE_FLOAT
      && GET_MODE (x) != VOIDmode && msize > xsize)
    return 0;

  offset = subreg_lowpart_offset (mode, GET_MODE (x));

  if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
      && (GET_MODE_CLASS (mode) == MODE_INT
	  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
    {
      /* If we are getting the low-order part of something that has been
	 sign- or zero-extended, we can either just use the object being
	 extended or make a narrower extension.  If we want an even smaller
	 piece than the size of the object being extended, call ourselves
	 recursively.

	 This case is used mostly by combine and cse.  */

      if (GET_MODE (XEXP (x, 0)) == mode)
	return XEXP (x, 0);
      else if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
	return gen_lowpart_common (mode, XEXP (x, 0));
      else if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (x)))
	return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
    }
  else if (GET_CODE (x) == SUBREG || GET_CODE (x) == REG
	   || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR)
    return simplify_gen_subreg (mode, x, GET_MODE (x), offset);
  else if ((GET_MODE_CLASS (mode) == MODE_VECTOR_INT
	    || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT)
	   && GET_MODE (x) == VOIDmode)
    return simplify_gen_subreg (mode, x, int_mode_for_mode (mode), offset);
  /* If X is a CONST_INT or a CONST_DOUBLE, extract the appropriate bits
     from the low-order part of the constant.  */
  else if ((GET_MODE_CLASS (mode) == MODE_INT
	    || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
	   && GET_MODE (x) == VOIDmode
	   && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
    {
      /* If MODE is twice the host word size, X is already the desired
	 representation.  Otherwise, if MODE is wider than a word, we can't
	 do this.  If MODE is exactly a word, return just one CONST_INT.  */

      if (GET_MODE_BITSIZE (mode) >= 2 * HOST_BITS_PER_WIDE_INT)
	return x;
      else if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
	return 0;
      else if (GET_MODE_BITSIZE (mode) == HOST_BITS_PER_WIDE_INT)
	return (GET_CODE (x) == CONST_INT ? x
		: GEN_INT (CONST_DOUBLE_LOW (x)));
      else
	{
	  /* MODE must be narrower than HOST_BITS_PER_WIDE_INT.  */
	  HOST_WIDE_INT val = (GET_CODE (x) == CONST_INT ? INTVAL (x)
			       : CONST_DOUBLE_LOW (x));

	  /* Sign extend to HOST_WIDE_INT.  */
	  val = trunc_int_for_mode (val, mode);

	  return (GET_CODE (x) == CONST_INT && INTVAL (x) == val ? x
		  : GEN_INT (val));
	}
    }

  /* The floating-point emulator can handle all conversions between
     FP and integer operands.  This simplifies reload because it
     doesn't have to deal with constructs like (subreg:DI
     (const_double:SF ...)) or (subreg:DF (const_int ...)).  */
  /* Single-precision floats are always 32-bits and double-precision
     floats are always 64-bits.  */

  else if (GET_MODE_CLASS (mode) == MODE_FLOAT
	   && GET_MODE_BITSIZE (mode) == 32
	   && GET_CODE (x) == CONST_INT)
    {
      REAL_VALUE_TYPE r;
      long i = INTVAL (x);

      real_from_target (&r, &i, mode);
      return CONST_DOUBLE_FROM_REAL_VALUE (r, mode);
    }
  else if (GET_MODE_CLASS (mode) == MODE_FLOAT
	   && GET_MODE_BITSIZE (mode) == 64
	   && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE)
	   && GET_MODE (x) == VOIDmode)
    {
      REAL_VALUE_TYPE r;
      HOST_WIDE_INT low, high;
      long i[2];

      if (GET_CODE (x) == CONST_INT)
	{
	  low = INTVAL (x);
	  high = low >> (HOST_BITS_PER_WIDE_INT - 1);
	}
      else
	{
	  low = CONST_DOUBLE_LOW (x);
	  high = CONST_DOUBLE_HIGH (x);
	}

      if (HOST_BITS_PER_WIDE_INT > 32)
	high = low >> 31 >> 1;

      /* REAL_VALUE_TARGET_DOUBLE takes the addressing order of the
	 target machine.  */
      if (WORDS_BIG_ENDIAN)
	i[0] = high, i[1] = low;
      else
	i[0] = low, i[1] = high;

      real_from_target (&r, i, mode);
      return CONST_DOUBLE_FROM_REAL_VALUE (r, mode);
    }
  else if ((GET_MODE_CLASS (mode) == MODE_INT
	    || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
	   && GET_CODE (x) == CONST_DOUBLE
	   && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
    {
      REAL_VALUE_TYPE r;
      long i[4];  /* Only the low 32 bits of each 'long' are used.  */
      int endian = WORDS_BIG_ENDIAN ? 1 : 0;

      /* Convert 'r' into an array of four 32-bit words in target word
	 order.  */
      REAL_VALUE_FROM_CONST_DOUBLE (r, x);
      switch (GET_MODE_BITSIZE (GET_MODE (x)))
	{
	case 32:
	  REAL_VALUE_TO_TARGET_SINGLE (r, i[3 * endian]);
	  i[1] = 0;
	  i[2] = 0;
	  i[3 - 3 * endian] = 0;
	  break;
	case 64:
	  REAL_VALUE_TO_TARGET_DOUBLE (r, i + 2 * endian);
	  i[2 - 2 * endian] = 0;
	  i[3 - 2 * endian] = 0;
	  break;
	case 96:
	  REAL_VALUE_TO_TARGET_LONG_DOUBLE (r, i + endian);
	  i[3 - 3 * endian] = 0;
	  break;
	case 128:
	  REAL_VALUE_TO_TARGET_LONG_DOUBLE (r, i);
	  break;
	default:
	  abort ();
	}
      /* Now, pack the 32-bit elements of the array into a CONST_DOUBLE
	 and return it.  */
#if HOST_BITS_PER_WIDE_INT == 32
      return immed_double_const (i[3 * endian], i[1 + endian], mode);
#else
      if (HOST_BITS_PER_WIDE_INT != 64)
	abort ();

      return immed_double_const ((((unsigned long) i[3 * endian])
				  | ((HOST_WIDE_INT) i[1 + endian] << 32)),
				 (((unsigned long) i[2 - endian])
				  | ((HOST_WIDE_INT) i[3 - 3 * endian] << 32)),
				 mode);
#endif
    }

  /* Otherwise, we can't do this.  */
  return 0;
}
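/* A worked example (not in the original source): with a 64-bit
   HOST_WIDE_INT,

	gen_lowpart_common (QImode, GEN_INT (0x1ff))

   extracts the low 8 bits and sign-extends them, returning
   (const_int -1), while gen_lowpart_common (SImode, (reg:SI n))
   returns the register unchanged.  */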
/* Return the real part (which has mode MODE) of a complex value X.
   This always comes at the low address in memory.  */

rtx
gen_realpart (mode, x)
     enum machine_mode mode;
     rtx x;
{
  if (WORDS_BIG_ENDIAN
      && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
      && REG_P (x)
      && REGNO (x) < FIRST_PSEUDO_REGISTER)
    internal_error
      ("can't access real part of complex value in hard register");
  else if (WORDS_BIG_ENDIAN)
    return gen_highpart (mode, x);
  else
    return gen_lowpart (mode, x);
}

/* Return the imaginary part (which has mode MODE) of a complex value X.
   This always comes at the high address in memory.  */

rtx
gen_imagpart (mode, x)
     enum machine_mode mode;
     rtx x;
{
  if (WORDS_BIG_ENDIAN)
    return gen_lowpart (mode, x);
  else if (! WORDS_BIG_ENDIAN
	   && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
	   && REG_P (x)
	   && REGNO (x) < FIRST_PSEUDO_REGISTER)
    internal_error
      ("can't access imaginary part of complex value in hard register");
  else
    return gen_highpart (mode, x);
}
/* Return 1 iff X, assumed to be a SUBREG,
   refers to the real part of the complex value in its containing reg.
   Complex values are always stored with the real part in the first word,
   regardless of WORDS_BIG_ENDIAN.  */

int
subreg_realpart_p (x)
     rtx x;
{
  if (GET_CODE (x) != SUBREG)
    abort ();

  return ((unsigned int) SUBREG_BYTE (x)
	  < GET_MODE_UNIT_SIZE (GET_MODE (SUBREG_REG (x))));
}
/* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a value,
   return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
   least-significant part of X.
   MODE specifies how big a part of X to return;
   it usually should not be larger than a word.
   If X is a MEM whose address is a QUEUED, the value may be so also.  */

rtx
gen_lowpart (mode, x)
     enum machine_mode mode;
     rtx x;
{
  rtx result = gen_lowpart_common (mode, x);

  if (result)
    return result;
  else if (GET_CODE (x) == REG)
    {
      /* Must be a hard reg that's not valid in MODE.  */
      result = gen_lowpart_common (mode, copy_to_reg (x));
      if (result == 0)
	abort ();
      return result;
    }
  else if (GET_CODE (x) == MEM)
    {
      /* The only additional case we can do is MEM.  */
      int offset = 0;

      /* The following exposes the use of "x" to CSE.  */
      if (GET_MODE_SIZE (GET_MODE (x)) <= UNITS_PER_WORD
	  && SCALAR_INT_MODE_P (GET_MODE (x))
	  && ! no_new_pseudos)
	return gen_lowpart (mode, force_reg (GET_MODE (x), x));

      if (WORDS_BIG_ENDIAN)
	offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
		  - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));

      if (BYTES_BIG_ENDIAN)
	/* Adjust the address so that the address-after-the-data
	   is unchanged.  */
	offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
		   - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));

      return adjust_address (x, mode, offset);
    }
  else if (GET_CODE (x) == ADDRESSOF)
    return gen_lowpart (mode, force_reg (GET_MODE (x), x));
  else
    abort ();
}
/* Like `gen_lowpart', but refer to the most significant part.
   This is used to access the imaginary part of a complex number.  */

rtx
gen_highpart (mode, x)
     enum machine_mode mode;
     rtx x;
{
  unsigned int msize = GET_MODE_SIZE (mode);
  rtx result;

  /* This case loses if X is a subreg.  To catch bugs early,
     complain if an invalid MODE is used even in other cases.  */
  if (msize > UNITS_PER_WORD
      && msize != GET_MODE_UNIT_SIZE (GET_MODE (x)))
    abort ();

  result = simplify_gen_subreg (mode, x, GET_MODE (x),
				subreg_highpart_offset (mode, GET_MODE (x)));

  /* simplify_gen_subreg is not guaranteed to return a valid operand for
     the target if we have a MEM.  gen_highpart must return a valid operand,
     emitting code if necessary to do so.  */
  if (result != NULL_RTX && GET_CODE (result) == MEM)
    result = validize_mem (result);

  if (!result)
    abort ();
  return result;
}
/* Like gen_highpart, but accept the mode of EXP's operand in case EXP
   can be a VOIDmode constant.  */
rtx
gen_highpart_mode (outermode, innermode, exp)
     enum machine_mode outermode, innermode;
     rtx exp;
{
  if (GET_MODE (exp) != VOIDmode)
    {
      if (GET_MODE (exp) != innermode)
	abort ();
      return gen_highpart (outermode, exp);
    }
  return simplify_gen_subreg (outermode, exp, innermode,
			      subreg_highpart_offset (outermode, innermode));
}
/* Return offset in bytes to get OUTERMODE low part
   of the value in mode INNERMODE stored in memory in target format.  */

unsigned int
subreg_lowpart_offset (outermode, innermode)
     enum machine_mode outermode, innermode;
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  return offset;
}
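/* A worked example (not in the original source): for SImode inside
   DImode on a 32-bit target (UNITS_PER_WORD == 4), difference is 4, so

	subreg_lowpart_offset (SImode, DImode)

   is 4 when WORDS_BIG_ENDIAN and 0 when little-endian; for QImode
   inside SImode only the BYTES_BIG_ENDIAN term matters, giving 3 or 0.  */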
/* Return offset in bytes to get OUTERMODE high part
   of the value in mode INNERMODE stored in memory in target format.  */
unsigned int
subreg_highpart_offset (outermode, innermode)
     enum machine_mode outermode, innermode;
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  if (GET_MODE_SIZE (innermode) < GET_MODE_SIZE (outermode))
    abort ();

  if (difference > 0)
    {
      if (! WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (! BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  return offset;
}
/* Return 1 iff X, assumed to be a SUBREG,
   refers to the least significant part of its containing reg.
   If X is not a SUBREG, always return 1 (it is its own low part!).  */

int
subreg_lowpart_p (x)
     rtx x;
{
  if (GET_CODE (x) != SUBREG)
    return 1;
  else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
    return 0;

  return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
	  == SUBREG_BYTE (x));
}
/* Helper routine for all the constant cases of operand_subword.
   Some places invoke this directly.  */

rtx
constant_subword (op, offset, mode)
     rtx op;
     int offset;
     enum machine_mode mode;
{
  int size_ratio = HOST_BITS_PER_WIDE_INT / BITS_PER_WORD;
  HOST_WIDE_INT val;

  /* If OP is already an integer word, return it.  */
  if (GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_SIZE (mode) == UNITS_PER_WORD)
    return op;

  /* The output is some bits, the width of the target machine's word.
     A wider-word host can surely hold them in a CONST_INT.  A narrower-word
     host can't.  */
  if (HOST_BITS_PER_WIDE_INT >= BITS_PER_WORD
      && GET_MODE_CLASS (mode) == MODE_FLOAT
      && GET_MODE_BITSIZE (mode) == 64
      && GET_CODE (op) == CONST_DOUBLE)
    {
      long k[2];
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_DOUBLE (rv, k);

      /* We handle 32-bit and >= 64-bit words here.  Note that the order in
	 which the words are written depends on the word endianness.
	 ??? This is a potential portability problem and should
	 be fixed at some point.

	 We must exercise caution with the sign bit.  By definition there
	 are 32 significant bits in K; there may be more in a HOST_WIDE_INT.
	 Consider a host with a 32-bit long and a 64-bit HOST_WIDE_INT.
	 So we explicitly mask and sign-extend as necessary.  */
      if (BITS_PER_WORD == 32)
	{
	  val = k[offset];
	  val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;
	  return GEN_INT (val);
	}
#if HOST_BITS_PER_WIDE_INT >= 64
      else if (BITS_PER_WORD >= 64 && offset == 0)
	{
	  val = k[! WORDS_BIG_ENDIAN];
	  val = (((val & 0xffffffff) ^ 0x80000000) - 0x80000000) << 32;
	  val |= (HOST_WIDE_INT) k[WORDS_BIG_ENDIAN] & 0xffffffff;
	  return GEN_INT (val);
	}
#endif
      else if (BITS_PER_WORD == 16)
	{
	  val = k[offset >> 1];
	  if ((offset & 1) == ! WORDS_BIG_ENDIAN)
	    val >>= 16;
	  val = ((val & 0xffff) ^ 0x8000) - 0x8000;
	  return GEN_INT (val);
	}
      else
	abort ();
    }
  else if (HOST_BITS_PER_WIDE_INT >= BITS_PER_WORD
	   && GET_MODE_CLASS (mode) == MODE_FLOAT
	   && GET_MODE_BITSIZE (mode) > 64
	   && GET_CODE (op) == CONST_DOUBLE)
    {
      long k[4];
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);

      if (BITS_PER_WORD == 32)
	{
	  val = k[offset];
	  val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;
	  return GEN_INT (val);
	}
#if HOST_BITS_PER_WIDE_INT >= 64
      else if (BITS_PER_WORD >= 64 && offset <= 1)
	{
	  val = k[offset * 2 + ! WORDS_BIG_ENDIAN];
	  val = (((val & 0xffffffff) ^ 0x80000000) - 0x80000000) << 32;
	  val |= (HOST_WIDE_INT) k[offset * 2 + WORDS_BIG_ENDIAN] & 0xffffffff;
	  return GEN_INT (val);
	}
#endif
      else
	abort ();
    }

  /* Single word float is a little harder, since single- and double-word
     values often do not have the same high-order bits.  We have already
     verified that we want the only defined word of the single-word value.  */
  if (GET_MODE_CLASS (mode) == MODE_FLOAT
      && GET_MODE_BITSIZE (mode) == 32
      && GET_CODE (op) == CONST_DOUBLE)
    {
      long l;
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_SINGLE (rv, l);

      /* Sign extend from known 32-bit value to HOST_WIDE_INT.  */
      val = l;
      val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;

      if (BITS_PER_WORD == 16)
	{
	  if ((offset & 1) == ! WORDS_BIG_ENDIAN)
	    val >>= 16;
	  val = ((val & 0xffff) ^ 0x8000) - 0x8000;
	}

      return GEN_INT (val);
    }

  /* The only remaining cases that we can handle are integers.
     Convert to proper endianness now since these cases need it.
     At this point, offset == 0 means the low-order word.

     We do not want to handle the case when BITS_PER_WORD <= HOST_BITS_PER_INT
     in general.  However, if OP is (const_int 0), we can just return
     it for any word.  */

  if (op == const0_rtx)
    return op;

  if (GET_MODE_CLASS (mode) != MODE_INT
      || (GET_CODE (op) != CONST_INT && GET_CODE (op) != CONST_DOUBLE)
      || BITS_PER_WORD > HOST_BITS_PER_WIDE_INT)
    return 0;

  if (WORDS_BIG_ENDIAN)
    offset = GET_MODE_SIZE (mode) / UNITS_PER_WORD - 1 - offset;

  /* Find out which word on the host machine this value is in and get
     it from the constant.  */
  val = (offset / size_ratio == 0
	 ? (GET_CODE (op) == CONST_INT ? INTVAL (op) : CONST_DOUBLE_LOW (op))
	 : (GET_CODE (op) == CONST_INT
	    ? (INTVAL (op) < 0 ? ~0 : 0) : CONST_DOUBLE_HIGH (op)));

  /* Get the value we want into the low bits of val.  */
  if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT)
    val = ((val >> ((offset % size_ratio) * BITS_PER_WORD)));

  val = trunc_int_for_mode (val, word_mode);

  return GEN_INT (val);
}
/* Return subword OFFSET of operand OP.
   The word number, OFFSET, is interpreted as the word number starting
   at the low-order address.  OFFSET 0 is the low-order word if not
   WORDS_BIG_ENDIAN, otherwise it is the high-order word.

   If we cannot extract the required word, we return zero.  Otherwise,
   an rtx corresponding to the requested word will be returned.

   VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
   reload has completed, a valid address will always be returned.  After
   reload, if a valid address cannot be returned, we return zero.

   If VALIDATE_ADDRESS is zero, we simply form the required address; validating
   it is the responsibility of the caller.

   MODE is the mode of OP in case it is a CONST_INT.

   ??? This is still rather broken for some cases.  The problem for the
   moment is that all callers of this thing provide no 'goal mode' to
   tell us to work with.  This exists because all callers were written
   in a word based SUBREG world.
   Now use of this function can be deprecated by simplify_subreg in most
   cases.  */

rtx
operand_subword (op, offset, validate_address, mode)
     rtx op;
     unsigned int offset;
     int validate_address;
     enum machine_mode mode;
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  if (mode == VOIDmode)
    abort ();

  /* If OP is narrower than a word, fail.  */
  if (mode != BLKmode
      && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
    return 0;

  /* If we want a word outside OP, return zero.  */
  if (mode != BLKmode
      && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
    return const0_rtx;

  /* Form a new MEM at the requested address.  */
  if (GET_CODE (op) == MEM)
    {
      rtx new = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);

      if (! validate_address)
	return new;

      else if (reload_completed)
	{
	  if (! strict_memory_address_p (word_mode, XEXP (new, 0)))
	    return 0;
	}
      else
	return replace_equiv_address (new, XEXP (new, 0));
    }

  /* Rest can be handled by simplify_subreg.  */
  return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
}
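/* A worked example (not in the original source): given a DImode pseudo
   REG on a 32-bit little-endian target where word_mode is SImode,

	operand_subword (reg, 0, 1, DImode)   =>   (subreg:SI (reg:DI n) 0)
	operand_subword (reg, 1, 1, DImode)   =>   (subreg:SI (reg:DI n) 4)

   while constant operands are decomposed by simplify_gen_subreg (see
   constant_subword above for the cases some callers invoke directly).  */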
/* Similar to `operand_subword', but never return 0.  If we can't extract
   the required subword, put OP into a register and try again.  If that fails,
   abort.  We always validate the address in this case.

   MODE is the mode of OP, in case it is CONST_INT.  */

rtx
operand_subword_force (op, offset, mode)
     rtx op;
     unsigned int offset;
     enum machine_mode mode;
{
  rtx result = operand_subword (op, offset, 1, mode);

  if (result)
    return result;

  if (mode != BLKmode && mode != VOIDmode)
    {
      /* If this is a register which can not be accessed by words, copy it
	 to a pseudo register.  */
      if (GET_CODE (op) == REG)
	op = copy_to_reg (op);
      else
	op = force_reg (mode, op);
    }

  result = operand_subword (op, offset, 1, mode);
  if (result == 0)
    abort ();

  return result;
}
/* Given a compare instruction, swap the operands.
   A test instruction is changed into a compare of 0 against the operand.  */

void
reverse_comparison (insn)
     rtx insn;
{
  rtx body = PATTERN (insn);
  rtx comp;

  if (GET_CODE (body) == SET)
    comp = SET_SRC (body);
  else
    comp = SET_SRC (XVECEXP (body, 0, 0));

  if (GET_CODE (comp) == COMPARE)
    {
      rtx op0 = XEXP (comp, 0);
      rtx op1 = XEXP (comp, 1);
      XEXP (comp, 0) = op1;
      XEXP (comp, 1) = op0;
    }
  else
    {
      rtx new = gen_rtx_COMPARE (VOIDmode,
				 CONST0_RTX (GET_MODE (comp)), comp);
      if (GET_CODE (body) == SET)
	SET_SRC (body) = new;
      else
	SET_SRC (XVECEXP (body, 0, 0)) = new;
    }
}
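/* Illustrative transformation (not in the original source): applied to
   an insn whose pattern is

	(set (cc0) (compare (reg:SI a) (reg:SI b)))

   reverse_comparison rewrites it in place to

	(set (cc0) (compare (reg:SI b) (reg:SI a)))

   and a plain test such as (set (cc0) (reg:SI a)) becomes
   (set (cc0) (compare (const_int 0) (reg:SI a))).  */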
/* Within a MEM_EXPR, we care about either (1) a component ref of a decl,
   or (2) a component ref of something variable.  Represent the latter with
   a NULL expression.  */

static tree
component_ref_for_mem_expr (ref)
     tree ref;
{
  tree inner = TREE_OPERAND (ref, 0);

  if (TREE_CODE (inner) == COMPONENT_REF)
    inner = component_ref_for_mem_expr (inner);
  else
    {
      tree placeholder_ptr = 0;

      /* Now remove any conversions: they don't change what the underlying
	 object is.  Likewise for SAVE_EXPR.  Also handle PLACEHOLDER_EXPR.  */
      while (TREE_CODE (inner) == NOP_EXPR || TREE_CODE (inner) == CONVERT_EXPR
	     || TREE_CODE (inner) == NON_LVALUE_EXPR
	     || TREE_CODE (inner) == VIEW_CONVERT_EXPR
	     || TREE_CODE (inner) == SAVE_EXPR
	     || TREE_CODE (inner) == PLACEHOLDER_EXPR)
	if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
	  inner = find_placeholder (inner, &placeholder_ptr);
	else
	  inner = TREE_OPERAND (inner, 0);

      if (! DECL_P (inner))
	inner = NULL_TREE;
    }

  if (inner == TREE_OPERAND (ref, 0))
    return ref;
  else
    return build (COMPONENT_REF, TREE_TYPE (ref), inner,
		  TREE_OPERAND (ref, 1));
}
1844 /* Given REF, a MEM, and T, either the type of X or the expression
1845 corresponding to REF, set the memory attributes. OBJECTP is nonzero
1846 if we are making a new object of this type. BITPOS is nonzero if
1847 there is an offset outstanding on T that will be applied later. */
void
set_mem_attributes_minus_bitpos (ref, t, objectp, bitpos)
     rtx ref;
     tree t;
     int objectp;
     HOST_WIDE_INT bitpos;
{
  HOST_WIDE_INT alias = MEM_ALIAS_SET (ref);
  tree expr = MEM_EXPR (ref);
  rtx offset = MEM_OFFSET (ref);
  rtx size = MEM_SIZE (ref);
  unsigned int align = MEM_ALIGN (ref);
  HOST_WIDE_INT apply_bitpos = 0;
  tree type;

  /* It can happen that type_for_mode was given a mode for which there
     is no language-level type.  In that case it returns NULL, which
     we can see here.  */
  if (t == NULL_TREE)
    return;

  type = TYPE_P (t) ? t : TREE_TYPE (t);

  /* If we have already set DECL_RTL = ref, get_alias_set will get the
     wrong answer, as it assumes that DECL_RTL already has the right alias
     info.  Callers should not set DECL_RTL until after the call to
     set_mem_attributes.  */
  if (DECL_P (t) && ref == DECL_RTL_IF_SET (t))
    abort ();

  /* Get the alias set from the expression or type (perhaps using a
     front-end routine) and use it.  */
  alias = get_alias_set (t);

  MEM_VOLATILE_P (ref) = TYPE_VOLATILE (type);
  MEM_IN_STRUCT_P (ref) = AGGREGATE_TYPE_P (type);
  RTX_UNCHANGING_P (ref)
    |= ((lang_hooks.honor_readonly
         && (TYPE_READONLY (type) || TREE_READONLY (t)))
        || (! TYPE_P (t) && TREE_CONSTANT (t)));

  /* If we are making an object of this type, or if this is a DECL, we know
     that it is a scalar if the type is not an aggregate.  */
  if ((objectp || DECL_P (t)) && ! AGGREGATE_TYPE_P (type))
    MEM_SCALAR_P (ref) = 1;

  /* We can set the alignment from the type if we are making an object,
     this is an INDIRECT_REF, or if TYPE_ALIGN_OK.  */
  if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
    align = MAX (align, TYPE_ALIGN (type));

  /* If the size is known, we can set that.  */
  if (TYPE_SIZE_UNIT (type) && host_integerp (TYPE_SIZE_UNIT (type), 1))
    size = GEN_INT (tree_low_cst (TYPE_SIZE_UNIT (type), 1));

  /* If T is not a type, we may be able to deduce some more information about
     the expression.  */
  if (! TYPE_P (t))
    {
      maybe_set_unchanging (ref, t);
      if (TREE_THIS_VOLATILE (t))
        MEM_VOLATILE_P (ref) = 1;

      /* Now remove any conversions: they don't change what the underlying
         object is.  Likewise for SAVE_EXPR.  */
      while (TREE_CODE (t) == NOP_EXPR || TREE_CODE (t) == CONVERT_EXPR
             || TREE_CODE (t) == NON_LVALUE_EXPR
             || TREE_CODE (t) == VIEW_CONVERT_EXPR
             || TREE_CODE (t) == SAVE_EXPR)
        t = TREE_OPERAND (t, 0);

      /* If this expression can't be addressed (e.g., it contains a reference
         to a non-addressable field), show we don't change its alias set.  */
      if (! can_address_p (t))
        MEM_KEEP_ALIAS_SET_P (ref) = 1;

      /* If this is a decl, set the attributes of the MEM from it.  */
      if (DECL_P (t))
        {
          expr = t;
          offset = const0_rtx;
          apply_bitpos = bitpos;
          size = (DECL_SIZE_UNIT (t)
                  && host_integerp (DECL_SIZE_UNIT (t), 1)
                  ? GEN_INT (tree_low_cst (DECL_SIZE_UNIT (t), 1)) : 0);
          align = DECL_ALIGN (t);
        }

      /* If this is a constant, we know the alignment.  */
      else if (TREE_CODE_CLASS (TREE_CODE (t)) == 'c')
        {
          align = TYPE_ALIGN (type);
#ifdef CONSTANT_ALIGNMENT
          align = CONSTANT_ALIGNMENT (t, align);
#endif
        }

      /* If this is a field reference and not a bit-field, record it.  */
      /* ??? There is some information that can be gleaned from bit-fields,
         such as the word offset in the structure that might be modified.
         But skip it for now.  */
      else if (TREE_CODE (t) == COMPONENT_REF
               && ! DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
        {
          expr = component_ref_for_mem_expr (t);
          offset = const0_rtx;
          apply_bitpos = bitpos;
          /* ??? Any reason the field size would be different than
             the size we got from the type?  */
        }

      /* If this is an array reference, look for an outer field reference.  */
      else if (TREE_CODE (t) == ARRAY_REF)
        {
          tree off_tree = size_zero_node;

          do
            {
              tree index = TREE_OPERAND (t, 1);
              tree array = TREE_OPERAND (t, 0);
              tree domain = TYPE_DOMAIN (TREE_TYPE (array));
              tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
              tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));

              /* We assume all arrays have sizes that are a multiple of a byte.
                 First subtract the lower bound, if any, in the type of the
                 index, then convert to sizetype and multiply by the size of
                 the array element.  */
              if (low_bound != 0 && ! integer_zerop (low_bound))
                index = fold (build (MINUS_EXPR, TREE_TYPE (index),
                                     index, low_bound));

              /* If the index has a self-referential type, pass it to a
                 WITH_RECORD_EXPR; if the component size is self-referential,
                 pass our component to one.  */
              if (! TREE_CONSTANT (index)
                  && contains_placeholder_p (index))
                index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, t);
              if (! TREE_CONSTANT (unit_size)
                  && contains_placeholder_p (unit_size))
                unit_size = build (WITH_RECORD_EXPR, sizetype,
                                   unit_size, array);

              off_tree
                = fold (build (PLUS_EXPR, sizetype,
                               fold (build (MULT_EXPR, sizetype,
                                            index,
                                            unit_size)),
                               off_tree));
              t = TREE_OPERAND (t, 0);
            }
          while (TREE_CODE (t) == ARRAY_REF);

          if (DECL_P (t))
            {
              expr = t;
              offset = NULL;
              if (host_integerp (off_tree, 1))
                {
                  HOST_WIDE_INT ioff = tree_low_cst (off_tree, 1);
                  HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
                  align = DECL_ALIGN (t);
                  if (aoff && (unsigned HOST_WIDE_INT) aoff < align)
                    align = aoff;
                  offset = GEN_INT (ioff);
                  apply_bitpos = bitpos;
                }
            }
          else if (TREE_CODE (t) == COMPONENT_REF)
            {
              expr = component_ref_for_mem_expr (t);
              if (host_integerp (off_tree, 1))
                {
                  offset = GEN_INT (tree_low_cst (off_tree, 1));
                  apply_bitpos = bitpos;
                }
              /* ??? Any reason the field size would be different than
                 the size we got from the type?  */
            }
          else if (flag_argument_noalias > 1
                   && TREE_CODE (t) == INDIRECT_REF
                   && TREE_CODE (TREE_OPERAND (t, 0)) == PARM_DECL)
            {
              expr = t;
              offset = NULL;
            }
        }

      /* If this is a Fortran indirect argument reference, record the
         parameter decl.  */
      else if (flag_argument_noalias > 1
               && TREE_CODE (t) == INDIRECT_REF
               && TREE_CODE (TREE_OPERAND (t, 0)) == PARM_DECL)
        {
          expr = t;
          offset = NULL;
        }
    }

  /* If we modified OFFSET based on T, then subtract the outstanding
     bit position offset.  Similarly, increase the size of the accessed
     object to contain the negative offset.  */
  if (apply_bitpos)
    {
      offset = plus_constant (offset, -(apply_bitpos / BITS_PER_UNIT));
      if (size)
        size = plus_constant (size, apply_bitpos / BITS_PER_UNIT);
    }

  /* Now set the attributes we computed above.  */
  MEM_ATTRS (ref)
    = get_mem_attrs (alias, expr, offset, size, align, GET_MODE (ref));

  /* If this is already known to be a scalar or aggregate, we are done.  */
  if (MEM_IN_STRUCT_P (ref) || MEM_SCALAR_P (ref))
    return;

  /* If it is a reference into an aggregate, this is part of an aggregate.
     Otherwise we don't know.  */
  else if (TREE_CODE (t) == COMPONENT_REF || TREE_CODE (t) == ARRAY_REF
           || TREE_CODE (t) == ARRAY_RANGE_REF
           || TREE_CODE (t) == BIT_FIELD_REF)
    MEM_IN_STRUCT_P (ref) = 1;
}
void
set_mem_attributes (ref, t, objectp)
     rtx ref;
     tree t;
     int objectp;
{
  set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
}
/* Set the memory attributes of MEM from REG, using REG's attached
   expression and offset; MEM's alias set, size, alignment, and mode
   are preserved.  */

void
set_mem_attrs_from_reg (mem, reg)
     rtx mem;
     rtx reg;
{
  MEM_ATTRS (mem)
    = get_mem_attrs (MEM_ALIAS_SET (mem), REG_EXPR (reg),
                     GEN_INT (REG_OFFSET (reg)),
                     MEM_SIZE (mem), MEM_ALIGN (mem), GET_MODE (mem));
}
/* Set the alias set of MEM to SET.  */

void
set_mem_alias_set (mem, set)
     rtx mem;
     HOST_WIDE_INT set;
{
#ifdef ENABLE_CHECKING
  /* If the new and old alias sets don't conflict, something is wrong.  */
  if (!alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)))
    abort ();
#endif

  MEM_ATTRS (mem) = get_mem_attrs (set, MEM_EXPR (mem), MEM_OFFSET (mem),
                                   MEM_SIZE (mem), MEM_ALIGN (mem),
                                   GET_MODE (mem));
}

/* Set the alignment of MEM to ALIGN bits.  */

void
set_mem_align (mem, align)
     rtx mem;
     unsigned int align;
{
  MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
                                   MEM_OFFSET (mem), MEM_SIZE (mem), align,
                                   GET_MODE (mem));
}

/* Set the expr for MEM to EXPR.  */

void
set_mem_expr (mem, expr)
     rtx mem;
     tree expr;
{
  MEM_ATTRS (mem)
    = get_mem_attrs (MEM_ALIAS_SET (mem), expr, MEM_OFFSET (mem),
                     MEM_SIZE (mem), MEM_ALIGN (mem), GET_MODE (mem));
}

/* Set the offset of MEM to OFFSET.  */

void
set_mem_offset (mem, offset)
     rtx mem, offset;
{
  MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
                                   offset, MEM_SIZE (mem), MEM_ALIGN (mem),
                                   GET_MODE (mem));
}

/* Set the size of MEM to SIZE.  */

void
set_mem_size (mem, size)
     rtx mem, size;
{
  MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
                                   MEM_OFFSET (mem), size, MEM_ALIGN (mem),
                                   GET_MODE (mem));
}
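
/* Usage sketch (illustrative only; MEM and the values passed are
   hypothetical): after proving that a stack slot is word-aligned, a
   pass could record that fact, and a known store size, like so:

	set_mem_align (mem, BITS_PER_WORD);
	set_mem_size (mem, GEN_INT (UNITS_PER_WORD));

   Each setter rebuilds the shared MEM_ATTRS structure via
   get_mem_attrs, so the old attribute block is never modified in
   place.  */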
/* Return a memory reference like MEMREF, but with its mode changed to MODE
   and its address changed to ADDR.  (VOIDmode means don't change the mode.
   NULL for ADDR means don't change the address.)  VALIDATE is nonzero if the
   returned memory location is required to be valid.  The memory
   attributes are not changed.  */

static rtx
change_address_1 (memref, mode, addr, validate)
     rtx memref;
     enum machine_mode mode;
     rtx addr;
     int validate;
{
  rtx new;

  if (GET_CODE (memref) != MEM)
    abort ();
  if (mode == VOIDmode)
    mode = GET_MODE (memref);
  if (addr == 0)
    addr = XEXP (memref, 0);

  if (validate)
    {
      if (reload_in_progress || reload_completed)
        {
          if (! memory_address_p (mode, addr))
            abort ();
        }
      else
        addr = memory_address (mode, addr);
    }

  if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
    return memref;

  new = gen_rtx_MEM (mode, addr);
  MEM_COPY_ATTRIBUTES (new, memref);
  return new;
}
/* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
   way we are changing MEMREF, so we only preserve the alias set.  */

rtx
change_address (memref, mode, addr)
     rtx memref;
     enum machine_mode mode;
     rtx addr;
{
  rtx new = change_address_1 (memref, mode, addr, 1);
  enum machine_mode mmode = GET_MODE (new);

  MEM_ATTRS (new)
    = get_mem_attrs (MEM_ALIAS_SET (memref), 0, 0,
                     mmode == BLKmode ? 0 : GEN_INT (GET_MODE_SIZE (mmode)),
                     (mmode == BLKmode ? BITS_PER_UNIT
                      : GET_MODE_ALIGNMENT (mmode)),
                     mmode);

  return new;
}
/* Return a memory reference like MEMREF, but with its mode changed
   to MODE and its address offset by OFFSET bytes.  If VALIDATE is
   nonzero, the memory address is forced to be valid.
   If ADJUST is zero, OFFSET is only used to update MEM_ATTRS
   and the caller is responsible for adjusting MEMREF's base register.  */

rtx
adjust_address_1 (memref, mode, offset, validate, adjust)
     rtx memref;
     enum machine_mode mode;
     HOST_WIDE_INT offset;
     int validate, adjust;
{
  rtx addr = XEXP (memref, 0);
  rtx new;
  rtx memoffset = MEM_OFFSET (memref);
  rtx size = 0;
  unsigned int memalign = MEM_ALIGN (memref);

  /* ??? Prefer to create garbage instead of creating shared rtl.
     This may happen even if offset is nonzero -- consider
     (plus (plus reg reg) const_int) -- so do this always.  */
  addr = copy_rtx (addr);

  if (adjust)
    {
      /* If MEMREF is a LO_SUM and the offset is within the alignment of the
         object, we can merge it into the LO_SUM.  */
      if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
          && offset >= 0
          && (unsigned HOST_WIDE_INT) offset
             < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
        addr = gen_rtx_LO_SUM (Pmode, XEXP (addr, 0),
                               plus_constant (XEXP (addr, 1), offset));
      else
        addr = plus_constant (addr, offset);
    }

  new = change_address_1 (memref, mode, addr, validate);

  /* Compute the new values of the memory attributes due to this adjustment.
     We add the offsets and update the alignment.  */
  if (memoffset)
    memoffset = GEN_INT (offset + INTVAL (memoffset));

  /* Compute the new alignment by taking the MIN of the alignment and the
     lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
     is zero.  */
  if (offset != 0)
    memalign
      = MIN (memalign,
             (unsigned HOST_WIDE_INT) (offset & -offset) * BITS_PER_UNIT);

  /* We can compute the size in a number of ways.  */
  if (GET_MODE (new) != BLKmode)
    size = GEN_INT (GET_MODE_SIZE (GET_MODE (new)));
  else if (MEM_SIZE (memref))
    size = plus_constant (MEM_SIZE (memref), -offset);

  MEM_ATTRS (new) = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref),
                                   memoffset, size, memalign, GET_MODE (new));

  /* At some point, we should validate that this offset is within the object,
     if all the appropriate values are known.  */
  return new;
}
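
/* Usage sketch (illustrative; MEM is hypothetical): the usual entry
   points are the adjust_address and adjust_address_nv macros, thin
   wrappers that fix VALIDATE and ADJUST.  To split a DImode MEM into
   its two SImode halves on a big-endian target, one might write:

	rtx high = adjust_address (mem, SImode, 0);
	rtx low = adjust_address (mem, SImode, GET_MODE_SIZE (SImode));

   The bookkeeping above keeps MEM_OFFSET, MEM_SIZE and MEM_ALIGN
   consistent with the new addresses.  */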
/* Return a memory reference like MEMREF, but with its mode changed
   to MODE and its address changed to ADDR, which is assumed to be
   MEMREF offset by OFFSET bytes.  If VALIDATE is
   nonzero, the memory address is forced to be valid.  */

rtx
adjust_automodify_address_1 (memref, mode, addr, offset, validate)
     rtx memref;
     enum machine_mode mode;
     rtx addr;
     HOST_WIDE_INT offset;
     int validate;
{
  memref = change_address_1 (memref, VOIDmode, addr, validate);
  return adjust_address_1 (memref, mode, offset, validate, 0);
}
/* Return a memory reference like MEMREF, but whose address is changed by
   adding OFFSET, an RTX, to it.  POW2 is the highest power of two factor
   known to be in OFFSET (possibly 1).  */

rtx
offset_address (memref, offset, pow2)
     rtx memref;
     rtx offset;
     HOST_WIDE_INT pow2;
{
  rtx new, addr = XEXP (memref, 0);

  new = simplify_gen_binary (PLUS, Pmode, addr, offset);

  /* At this point we don't know _why_ the address is invalid.  It
     could have secondary memory references, multiplies or anything.

     However, if we did go and rearrange things, we can wind up not
     being able to recognize the magic around pic_offset_table_rtx.
     This stuff is fragile, and is yet another example of why it is
     bad to expose PIC machinery too early.  */
  if (! memory_address_p (GET_MODE (memref), new)
      && GET_CODE (addr) == PLUS
      && XEXP (addr, 0) == pic_offset_table_rtx)
    {
      addr = force_reg (GET_MODE (addr), addr);
      new = simplify_gen_binary (PLUS, Pmode, addr, offset);
    }

  update_temp_slot_address (XEXP (memref, 0), new);
  new = change_address_1 (memref, VOIDmode, new, 1);

  /* Update the alignment to reflect the offset.  Reset the offset, which
     we don't know.  */
  MEM_ATTRS (new)
    = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref), 0, 0,
                     MIN (MEM_ALIGN (memref),
                          (unsigned HOST_WIDE_INT) pow2 * BITS_PER_UNIT),
                     GET_MODE (new));
  return new;
}
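
/* Usage sketch (illustrative; MEM and IDX are hypothetical): given a
   BLKmode MEM and a variable byte offset known to be a multiple of 4,
   a caller could form the element address with:

	rtx elt = offset_address (mem, idx, 4);

   POW2 only affects the recorded MEM_ALIGN; the offset itself becomes
   unknown, as the code above documents.  */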
/* Return a memory reference like MEMREF, but with its address changed to
   ADDR.  The caller is asserting that the actual piece of memory pointed
   to is the same, just the form of the address is being changed, such as
   by putting something into a register.  */

rtx
replace_equiv_address (memref, addr)
     rtx memref;
     rtx addr;
{
  /* change_address_1 copies the memory attribute structure without change
     and that's exactly what we want here.  */
  update_temp_slot_address (XEXP (memref, 0), addr);
  return change_address_1 (memref, VOIDmode, addr, 1);
}

/* Likewise, but the reference is not required to be valid.  */

rtx
replace_equiv_address_nv (memref, addr)
     rtx memref;
     rtx addr;
{
  return change_address_1 (memref, VOIDmode, addr, 0);
}
/* Return a memory reference like MEMREF, but with its mode widened to
   MODE and offset by OFFSET.  This would be used by targets that e.g.
   cannot issue QImode memory operations and have to use SImode memory
   operations plus masking logic.  */

rtx
widen_memory_access (memref, mode, offset)
     rtx memref;
     enum machine_mode mode;
     HOST_WIDE_INT offset;
{
  rtx new = adjust_address_1 (memref, mode, offset, 1, 1);
  tree expr = MEM_EXPR (new);
  rtx memoffset = MEM_OFFSET (new);
  unsigned int size = GET_MODE_SIZE (mode);

  /* If we don't know what offset we were at within the expression, then
     we can't know if we've overstepped the bounds.  */
  if (! memoffset)
    expr = NULL_TREE;

  while (expr)
    {
      if (TREE_CODE (expr) == COMPONENT_REF)
        {
          tree field = TREE_OPERAND (expr, 1);

          if (! DECL_SIZE_UNIT (field))
            {
              expr = NULL_TREE;
              break;
            }

          /* Is the field at least as large as the access?  If so, ok,
             otherwise strip back to the containing structure.  */
          if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
              && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
              && INTVAL (memoffset) >= 0)
            break;

          if (! host_integerp (DECL_FIELD_OFFSET (field), 1))
            {
              expr = NULL_TREE;
              break;
            }

          expr = TREE_OPERAND (expr, 0);
          memoffset = (GEN_INT (INTVAL (memoffset)
                       + tree_low_cst (DECL_FIELD_OFFSET (field), 1)
                       + (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
                          / BITS_PER_UNIT)));
        }
      /* Similarly for the decl.  */
      else if (DECL_P (expr)
               && DECL_SIZE_UNIT (expr)
               && TREE_CODE (DECL_SIZE_UNIT (expr)) == INTEGER_CST
               && compare_tree_int (DECL_SIZE_UNIT (expr), size) >= 0
               && (! memoffset || INTVAL (memoffset) >= 0))
        break;
      else
        {
          /* The widened memory access overflows the expression, which means
             that it could alias another expression.  Zap it.  */
          expr = NULL_TREE;
          break;
        }
    }

  if (! expr)
    memoffset = NULL_RTX;

  /* The widened memory may alias other stuff, so zap the alias set.  */
  /* ??? Maybe use get_alias_set on any remaining expression.  */

  MEM_ATTRS (new) = get_mem_attrs (0, expr, memoffset, GEN_INT (size),
                                   MEM_ALIGN (new), mode);

  return new;
}
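
/* Usage sketch (illustrative; MEM is hypothetical): a target that can
   only do SImode loads could widen a QImode reference and mask out the
   byte afterwards:

	rtx wide = widen_memory_access (mem, SImode, 0);

   Note that the alias set of WIDE is cleared, so the widened access
   pessimizes alias information rather than risking a wrong answer.  */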
/* Return a newly created CODE_LABEL rtx with a unique label number.  */

rtx
gen_label_rtx ()
{
  return gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX, NULL_RTX,
                             NULL, label_num++, NULL);
}
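
/* Usage sketch (illustrative): a label is not part of the insn chain
   until it is emitted, typically as the target of a jump:

	rtx label = gen_label_rtx ();
	emit_jump_insn (gen_jump (label));
	...
	emit_label (label);

   gen_jump here is the machine-generated pattern constructor provided
   by most targets.  */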
/* For procedure integration.  */

/* Install new pointers to the first and last insns in the chain.
   Also, set cur_insn_uid to one higher than the last in use.
   Used for an inline-procedure after copying the insn chain.  */

void
set_new_first_and_last_insn (first, last)
     rtx first, last;
{
  rtx insn;

  first_insn = first;
  last_insn = last;
  cur_insn_uid = 0;

  for (insn = first; insn; insn = NEXT_INSN (insn))
    cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));

  cur_insn_uid++;
}
/* Set the range of label numbers found in the current function.
   This is used when belatedly compiling an inline function.  */

void
set_new_first_and_last_label_num (first, last)
     int first, last;
{
  base_label_num = label_num;
  first_label_num = first;
  last_label_num = last;
}

/* Set the last label number found in the current function.
   This is used when belatedly compiling an inline function.  */

void
set_new_last_label_num (last)
     int last;
{
  base_label_num = label_num;
  last_label_num = last;
}

/* Restore all variables describing the current status from the structure *P.
   This is used after a nested function.  */

void
restore_emit_status (p)
     struct function *p ATTRIBUTE_UNUSED;
{
  last_label_num = 0;
}
/* Go through all the RTL insn bodies and copy any invalid shared
   structure.  This routine should only be called once.  */

void
unshare_all_rtl (fndecl, insn)
     tree fndecl;
     rtx insn;
{
  tree decl;

  /* Make sure that virtual parameters are not shared.  */
  for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
    SET_DECL_RTL (decl, copy_rtx_if_shared (DECL_RTL (decl)));

  /* Make sure that virtual stack slots are not shared.  */
  unshare_all_decls (DECL_INITIAL (fndecl));

  /* Unshare just about everything else.  */
  unshare_all_rtl_1 (insn);

  /* Make sure the addresses of stack slots found outside the insn chain
     (such as, in DECL_RTL of a variable) are not shared
     with the insn chain.

     This special care is necessary when the stack slot MEM does not
     actually appear in the insn chain.  If it does appear, its address
     is unshared from all else at that point.  */
  stack_slot_list = copy_rtx_if_shared (stack_slot_list);
}
/* Go through all the RTL insn bodies and copy any invalid shared
   structure, again.  This is a fairly expensive thing to do so it
   should be done sparingly.  */

void
unshare_all_rtl_again (insn)
     rtx insn;
{
  rtx p;
  tree decl;

  for (p = insn; p; p = NEXT_INSN (p))
    if (INSN_P (p))
      {
        reset_used_flags (PATTERN (p));
        reset_used_flags (REG_NOTES (p));
        reset_used_flags (LOG_LINKS (p));
      }

  /* Make sure that virtual stack slots are not shared.  */
  reset_used_decls (DECL_INITIAL (cfun->decl));

  /* Make sure that virtual parameters are not shared.  */
  for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = TREE_CHAIN (decl))
    reset_used_flags (DECL_RTL (decl));

  reset_used_flags (stack_slot_list);

  unshare_all_rtl (cfun->decl, insn);
}

/* Go through all the RTL insn bodies and copy any invalid shared structure.
   Assumes the mark bits are cleared at entry.  */

static void
unshare_all_rtl_1 (insn)
     rtx insn;
{
  for (; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
        PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
        REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
        LOG_LINKS (insn) = copy_rtx_if_shared (LOG_LINKS (insn));
      }
}
/* Go through all virtual stack slots of a function and copy any
   shared structure.  */

static void
unshare_all_decls (blk)
     tree blk;
{
  tree t;

  /* Copy shared decls.  */
  for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
    if (DECL_RTL_SET_P (t))
      SET_DECL_RTL (t, copy_rtx_if_shared (DECL_RTL (t)));

  /* Now process sub-blocks.  */
  for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
    unshare_all_decls (t);
}

/* Go through all virtual stack slots of a function and mark them as
   not shared.  */

static void
reset_used_decls (blk)
     tree blk;
{
  tree t;

  /* Mark decls.  */
  for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
    if (DECL_RTL_SET_P (t))
      reset_used_flags (DECL_RTL (t));

  /* Now process sub-blocks.  */
  for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
    reset_used_decls (t);
}
/* Similar to `copy_rtx' except that if MAY_SHARE is present, it is
   placed in the result directly, rather than being copied.  MAY_SHARE is
   either a MEM or an EXPR_LIST of MEMs.  */
2637 copy_most_rtx (orig, may_share)
2638 rtx orig;
2639 rtx may_share;
2641 rtx copy;
2642 int i, j;
2643 RTX_CODE code;
2644 const char *format_ptr;
2646 if (orig == may_share
2647 || (GET_CODE (may_share) == EXPR_LIST
2648 && in_expr_list_p (may_share, orig)))
2649 return orig;
2651 code = GET_CODE (orig);
2653 switch (code)
2655 case REG:
2656 case QUEUED:
2657 case CONST_INT:
2658 case CONST_DOUBLE:
2659 case CONST_VECTOR:
2660 case SYMBOL_REF:
2661 case CODE_LABEL:
2662 case PC:
2663 case CC0:
2664 return orig;
2665 default:
2666 break;
2669 copy = rtx_alloc (code);
2670 PUT_MODE (copy, GET_MODE (orig));
2671 RTX_FLAG (copy, in_struct) = RTX_FLAG (orig, in_struct);
2672 RTX_FLAG (copy, volatil) = RTX_FLAG (orig, volatil);
2673 RTX_FLAG (copy, unchanging) = RTX_FLAG (orig, unchanging);
2674 RTX_FLAG (copy, integrated) = RTX_FLAG (orig, integrated);
2675 RTX_FLAG (copy, frame_related) = RTX_FLAG (orig, frame_related);
2677 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
2679 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
2681 switch (*format_ptr++)
2683 case 'e':
2684 XEXP (copy, i) = XEXP (orig, i);
2685 if (XEXP (orig, i) != NULL && XEXP (orig, i) != may_share)
2686 XEXP (copy, i) = copy_most_rtx (XEXP (orig, i), may_share);
2687 break;
2689 case 'u':
2690 XEXP (copy, i) = XEXP (orig, i);
2691 break;
2693 case 'E':
2694 case 'V':
2695 XVEC (copy, i) = XVEC (orig, i);
2696 if (XVEC (orig, i) != NULL)
2698 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
2699 for (j = 0; j < XVECLEN (copy, i); j++)
2700 XVECEXP (copy, i, j)
2701 = copy_most_rtx (XVECEXP (orig, i, j), may_share);
2703 break;
2705 case 'w':
2706 XWINT (copy, i) = XWINT (orig, i);
2707 break;
2709 case 'n':
2710 case 'i':
2711 XINT (copy, i) = XINT (orig, i);
2712 break;
2714 case 't':
2715 XTREE (copy, i) = XTREE (orig, i);
2716 break;
2718 case 's':
2719 case 'S':
2720 XSTR (copy, i) = XSTR (orig, i);
2721 break;
2723 case '0':
2724 /* Copy this through the wide int field; that's safest. */
2725 X0WINT (copy, i) = X0WINT (orig, i);
2726 break;
2728 default:
2729 abort ();
2732 return copy;
2735 /* Mark ORIG as in use, and return a copy of it if it was already in use.
2736 Recursively does the same for subexpressions. */
2739 copy_rtx_if_shared (orig)
2740 rtx orig;
2742 rtx x = orig;
2743 int i;
2744 enum rtx_code code;
2745 const char *format_ptr;
2746 int copied = 0;
2748 if (x == 0)
2749 return 0;
2751 code = GET_CODE (x);
2753 /* These types may be freely shared. */
2755 switch (code)
2757 case REG:
2758 case QUEUED:
2759 case CONST_INT:
2760 case CONST_DOUBLE:
2761 case CONST_VECTOR:
2762 case SYMBOL_REF:
2763 case CODE_LABEL:
2764 case PC:
2765 case CC0:
2766 case SCRATCH:
      /* A SCRATCH must be shared because it represents a distinct
         value; a copy would not be the same scratch.  */
2768 return x;
2770 case CONST:
2771 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
2772 a LABEL_REF, it isn't sharable. */
2773 if (GET_CODE (XEXP (x, 0)) == PLUS
2774 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
2775 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
2776 return x;
2777 break;
2779 case INSN:
2780 case JUMP_INSN:
2781 case CALL_INSN:
2782 case NOTE:
2783 case BARRIER:
2784 /* The chain of insns is not being copied. */
2785 return x;
2787 case MEM:
2788 /* A MEM is allowed to be shared if its address is constant.
2790 We used to allow sharing of MEMs which referenced
2791 virtual_stack_vars_rtx or virtual_incoming_args_rtx, but
2792 that can lose. instantiate_virtual_regs will not unshare
2793 the MEMs, and combine may change the structure of the address
2794 because it looks safe and profitable in one context, but
2795 in some other context it creates unrecognizable RTL. */
2796 if (CONSTANT_ADDRESS_P (XEXP (x, 0)))
2797 return x;
2799 break;
2801 default:
2802 break;
2805 /* This rtx may not be shared. If it has already been seen,
2806 replace it with a copy of itself. */
2808 if (RTX_FLAG (x, used))
2810 rtx copy;
2812 copy = rtx_alloc (code);
2813 memcpy (copy, x,
2814 (sizeof (*copy) - sizeof (copy->fld)
2815 + sizeof (copy->fld[0]) * GET_RTX_LENGTH (code)));
2816 x = copy;
2817 copied = 1;
2819 RTX_FLAG (x, used) = 1;
2821 /* Now scan the subexpressions recursively.
2822 We can store any replaced subexpressions directly into X
2823 since we know X is not shared! Any vectors in X
2824 must be copied if X was copied. */
2826 format_ptr = GET_RTX_FORMAT (code);
2828 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2830 switch (*format_ptr++)
2832 case 'e':
2833 XEXP (x, i) = copy_rtx_if_shared (XEXP (x, i));
2834 break;
2836 case 'E':
2837 if (XVEC (x, i) != NULL)
2839 int j;
2840 int len = XVECLEN (x, i);
2842 if (copied && len > 0)
2843 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
2844 for (j = 0; j < len; j++)
2845 XVECEXP (x, i, j) = copy_rtx_if_shared (XVECEXP (x, i, j));
2847 break;
2850 return x;
/* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
   to look for shared sub-parts.  */

void
reset_used_flags (x)
     rtx x;
{
  int i, j;
  enum rtx_code code;
  const char *format_ptr;

  if (x == 0)
    return;

  code = GET_CODE (x);

  /* These types may be freely shared so we needn't do any resetting
     for them.  */

  switch (code)
    {
    case REG:
    case QUEUED:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
      return;

    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case NOTE:
    case LABEL_REF:
    case BARRIER:
      /* The chain of insns is not being copied.  */
      return;

    default:
      break;
    }

  RTX_FLAG (x, used) = 0;

  format_ptr = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    switch (*format_ptr++)
      {
      case 'e':
        reset_used_flags (XEXP (x, i));
        break;

      case 'E':
        for (j = 0; j < XVECLEN (x, i); j++)
          reset_used_flags (XVECEXP (x, i, j));
        break;
      }
}
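
/* Usage sketch (illustrative) of the mark-and-copy protocol above: to
   unshare a chain by hand, one first clears the mark bits and then
   copies anything seen twice:

	for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
	  if (INSN_P (insn))
	    reset_used_flags (PATTERN (insn));
	for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
	  if (INSN_P (insn))
	    PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));

   This is exactly what unshare_all_rtl_again does, with REG_NOTES and
   LOG_LINKS included.  */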
/* Copy X if necessary so that it won't be altered by changes in OTHER.
   Return X or the rtx for the pseudo reg the value of X was copied into.
   OTHER must be valid as a SET_DEST.  */

rtx
make_safe_from (x, other)
     rtx x, other;
{
  while (1)
    switch (GET_CODE (other))
      {
      case SUBREG:
        other = SUBREG_REG (other);
        break;
      case STRICT_LOW_PART:
      case SIGN_EXTEND:
      case ZERO_EXTEND:
        other = XEXP (other, 0);
        break;
      default:
        goto done;
      }

 done:
  if ((GET_CODE (other) == MEM
       && ! CONSTANT_P (x)
       && GET_CODE (x) != REG
       && GET_CODE (x) != SUBREG)
      || (GET_CODE (other) == REG
          && (REGNO (other) < FIRST_PSEUDO_REGISTER
              || reg_mentioned_p (other, x))))
    {
      rtx temp = gen_reg_rtx (GET_MODE (x));
      emit_move_insn (temp, x);
      return temp;
    }

  return x;
}
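
/* Usage sketch (illustrative; X and DEST are hypothetical): before
   emitting a sequence that stores into DEST while X is still live, an
   expander can protect X:

	x = make_safe_from (x, dest);
	emit_move_insn (dest, something);

   If X could be clobbered by the store, it has been copied into a
   fresh pseudo first.  */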
/* Emission of insns (adding them to the doubly-linked list).  */

/* Return the first insn of the current sequence or current function.  */

rtx
get_insns ()
{
  return first_insn;
}

/* Specify a new insn as the first in the chain.  */

void
set_first_insn (insn)
     rtx insn;
{
  if (PREV_INSN (insn) != 0)
    abort ();
  first_insn = insn;
}

/* Return the last insn emitted in current sequence or current function.  */

rtx
get_last_insn ()
{
  return last_insn;
}

/* Specify a new insn as the last in the chain.  */

void
set_last_insn (insn)
     rtx insn;
{
  if (NEXT_INSN (insn) != 0)
    abort ();
  last_insn = insn;
}

/* Return the last insn emitted, even if it is in a sequence now pushed.  */

rtx
get_last_insn_anywhere ()
{
  struct sequence_stack *stack;
  if (last_insn)
    return last_insn;
  for (stack = seq_stack; stack; stack = stack->next)
    if (stack->last != 0)
      return stack->last;
  return 0;
}
/* Return the first nonnote insn emitted in current sequence or current
   function.  This routine looks inside SEQUENCEs.  */

rtx
get_first_nonnote_insn ()
{
  rtx insn = first_insn;

  while (insn)
    {
      insn = next_insn (insn);
      if (insn == 0 || GET_CODE (insn) != NOTE)
        break;
    }

  return insn;
}

/* Return the last nonnote insn emitted in current sequence or current
   function.  This routine looks inside SEQUENCEs.  */

rtx
get_last_nonnote_insn ()
{
  rtx insn = last_insn;

  while (insn)
    {
      insn = previous_insn (insn);
      if (insn == 0 || GET_CODE (insn) != NOTE)
        break;
    }

  return insn;
}

/* Return a number larger than any instruction's uid in this function.  */

int
get_max_uid ()
{
  return cur_insn_uid;
}
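
/* Usage sketch (illustrative; AGE is a hypothetical table): passes
   that keep per-insn side tables conventionally size them by uid:

	int *age = (int *) xcalloc (get_max_uid (), sizeof (int));
	...
	age[INSN_UID (insn)]++;

   renumber_insns below exists precisely to keep such tables from
   growing too sparse.  */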
/* Renumber instructions so that no instruction UIDs are wasted.  */

void
renumber_insns (stream)
     FILE *stream;
{
  rtx insn;

  /* If we're not supposed to renumber instructions, don't.  */
  if (!flag_renumber_insns)
    return;

  /* If there aren't that many instructions, then it's not really
     worth renumbering them.  */
  if (flag_renumber_insns == 1 && get_max_uid () < 25000)
    return;

  cur_insn_uid = 1;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      if (stream)
        fprintf (stream, "Renumbering insn %d to %d\n",
                 INSN_UID (insn), cur_insn_uid);
      INSN_UID (insn) = cur_insn_uid++;
    }
}
/* Return the next insn.  If it is a SEQUENCE, return the first insn
   of the sequence.  */

rtx
next_insn (insn)
     rtx insn;
{
  if (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn && GET_CODE (insn) == INSN
          && GET_CODE (PATTERN (insn)) == SEQUENCE)
        insn = XVECEXP (PATTERN (insn), 0, 0);
    }

  return insn;
}

/* Return the previous insn.  If it is a SEQUENCE, return the last insn
   of the sequence.  */

rtx
previous_insn (insn)
     rtx insn;
{
  if (insn)
    {
      insn = PREV_INSN (insn);
      if (insn && GET_CODE (insn) == INSN
          && GET_CODE (PATTERN (insn)) == SEQUENCE)
        insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
    }

  return insn;
}
/* Return the next insn after INSN that is not a NOTE.  This routine does not
   look inside SEQUENCEs.  */

rtx
next_nonnote_insn (insn)
     rtx insn;
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || GET_CODE (insn) != NOTE)
        break;
    }

  return insn;
}

/* Return the previous insn before INSN that is not a NOTE.  This routine does
   not look inside SEQUENCEs.  */

rtx
prev_nonnote_insn (insn)
     rtx insn;
{
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || GET_CODE (insn) != NOTE)
        break;
    }

  return insn;
}

/* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
   or 0, if there is none.  This routine does not look inside
   SEQUENCEs.  */

rtx
next_real_insn (insn)
     rtx insn;
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || GET_CODE (insn) == INSN
          || GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == JUMP_INSN)
        break;
    }

  return insn;
}

/* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
   or 0, if there is none.  This routine does not look inside
   SEQUENCEs.  */

rtx
prev_real_insn (insn)
     rtx insn;
{
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
          || GET_CODE (insn) == JUMP_INSN)
        break;
    }

  return insn;
}
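
/* Usage sketch (illustrative): the choice of walker matters.  To visit
   every real insn of the current function, skipping notes, labels and
   barriers:

	for (insn = get_insns (); insn; insn = next_real_insn (insn))
	  if (INSN_P (insn))
	    ...

   next_nonnote_insn would also return CODE_LABELs and BARRIERs.  */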
/* Find the next insn after INSN that really does something.  This routine
   does not look inside SEQUENCEs.  Until reload has completed, this is the
   same as next_real_insn.  */

int
active_insn_p (insn)
     rtx insn;
{
  return (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == JUMP_INSN
          || (GET_CODE (insn) == INSN
              && (! reload_completed
                  || (GET_CODE (PATTERN (insn)) != USE
                      && GET_CODE (PATTERN (insn)) != CLOBBER))));
}

rtx
next_active_insn (insn)
     rtx insn;
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || active_insn_p (insn))
        break;
    }

  return insn;
}

/* Find the last insn before INSN that really does something.  This routine
   does not look inside SEQUENCEs.  Until reload has completed, this is the
   same as prev_real_insn.  */

rtx
prev_active_insn (insn)
     rtx insn;
{
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || active_insn_p (insn))
        break;
    }

  return insn;
}
/* Return the next CODE_LABEL after the insn INSN, or 0 if there is none.  */

rtx
next_label (insn)
     rtx insn;
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || GET_CODE (insn) == CODE_LABEL)
        break;
    }

  return insn;
}

/* Return the last CODE_LABEL before the insn INSN, or 0 if there is none.  */

rtx
prev_label (insn)
     rtx insn;
{
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || GET_CODE (insn) == CODE_LABEL)
        break;
    }

  return insn;
}
#ifdef HAVE_cc0
/* INSN uses CC0 and is being moved into a delay slot.  Set up REG_CC_SETTER
   and REG_CC_USER notes so we can find it.  */

void
link_cc0_insns (insn)
     rtx insn;
{
  rtx user = next_nonnote_insn (insn);

  if (GET_CODE (user) == INSN && GET_CODE (PATTERN (user)) == SEQUENCE)
    user = XVECEXP (PATTERN (user), 0, 0);

  REG_NOTES (user) = gen_rtx_INSN_LIST (REG_CC_SETTER, insn,
                                        REG_NOTES (user));
  REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_CC_USER, user, REG_NOTES (insn));
}

/* Return the next insn that uses CC0 after INSN, which is assumed to
   set it.  This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
   applied to the result of this function should yield INSN).

   Normally, this is simply the next insn.  However, if a REG_CC_USER note
   is present, it contains the insn that uses CC0.

   Return 0 if we can't find the insn.  */

rtx
next_cc0_user (insn)
     rtx insn;
{
  rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);

  if (note)
    return XEXP (note, 0);

  insn = next_nonnote_insn (insn);
  if (insn && GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
    insn = XVECEXP (PATTERN (insn), 0, 0);

  if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
    return insn;

  return 0;
}

/* Find the insn that set CC0 for INSN.  Unless INSN has a REG_CC_SETTER
   note, it is the previous insn.  */

rtx
prev_cc0_setter (insn)
     rtx insn;
{
  rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);

  if (note)
    return XEXP (note, 0);

  insn = prev_nonnote_insn (insn);
  if (! sets_cc0_p (PATTERN (insn)))
    abort ();

  return insn;
}
#endif
/* Increment the label uses for all labels present in rtx.  */

static void
mark_label_nuses (x)
     rtx x;
{
  enum rtx_code code;
  int i, j;
  const char *fmt;

  code = GET_CODE (x);
  if (code == LABEL_REF)
    LABEL_NUSES (XEXP (x, 0))++;

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        mark_label_nuses (XEXP (x, i));
      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          mark_label_nuses (XVECEXP (x, i, j));
    }
}
3360 /* Try splitting insns that can be split for better scheduling.
3361 PAT is the pattern which might split.
3362 TRIAL is the insn providing PAT.
3363 LAST is nonzero if we should return the last insn of the sequence produced.
3365 If this routine succeeds in splitting, it returns the first or last
3366 replacement insn depending on the value of LAST. Otherwise, it
3367 returns TRIAL. If the insn to be returned can be split, it will be. */
3370 try_split (pat, trial, last)
3371 rtx pat, trial;
3372 int last;
3374 rtx before = PREV_INSN (trial);
3375 rtx after = NEXT_INSN (trial);
3376 int has_barrier = 0;
3377 rtx tem;
3378 rtx note, seq;
3379 int probability;
3380 rtx insn_last, insn;
3381 int njumps = 0;
3383 if (any_condjump_p (trial)
3384 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3385 split_branch_probability = INTVAL (XEXP (note, 0));
3386 probability = split_branch_probability;
3388 seq = split_insns (pat, trial);
3390 split_branch_probability = -1;
3392 /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
3393 We may need to handle this specially. */
3394 if (after && GET_CODE (after) == BARRIER)
3396 has_barrier = 1;
3397 after = NEXT_INSN (after);
3400 if (!seq)
3401 return trial;
3403 /* Avoid infinite loop if any insn of the result matches
3404 the original pattern. */
3405 insn_last = seq;
3406 while (1)
3408 if (INSN_P (insn_last)
3409 && rtx_equal_p (PATTERN (insn_last), pat))
3410 return trial;
3411 if (!NEXT_INSN (insn_last))
3412 break;
3413 insn_last = NEXT_INSN (insn_last);
3416 /* Mark labels. */
3417 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3419 if (GET_CODE (insn) == JUMP_INSN)
3421 mark_jump_label (PATTERN (insn), insn, 0);
3422 njumps++;
3423 if (probability != -1
3424 && any_condjump_p (insn)
3425 && !find_reg_note (insn, REG_BR_PROB, 0))
3427 /* We can preserve the REG_BR_PROB notes only if exactly
3428 one jump is created, otherwise the machine description
3429 is responsible for this step using
3430 split_branch_probability variable. */
3431 if (njumps != 1)
3432 abort ();
3433 REG_NOTES (insn)
3434 = gen_rtx_EXPR_LIST (REG_BR_PROB,
3435 GEN_INT (probability),
3436 REG_NOTES (insn));
3441 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3442 in SEQ and copy our CALL_INSN_FUNCTION_USAGE to it. */
3443 if (GET_CODE (trial) == CALL_INSN)
3445 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3446 if (GET_CODE (insn) == CALL_INSN)
3448 CALL_INSN_FUNCTION_USAGE (insn)
3449 = CALL_INSN_FUNCTION_USAGE (trial);
3450 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
3454 /* Copy notes, particularly those related to the CFG. */
3455 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3457 switch (REG_NOTE_KIND (note))
3459 case REG_EH_REGION:
3460 insn = insn_last;
3461 while (insn != NULL_RTX)
3463 if (GET_CODE (insn) == CALL_INSN
3464 || (flag_non_call_exceptions
3465 && may_trap_p (PATTERN (insn))))
3466 REG_NOTES (insn)
3467 = gen_rtx_EXPR_LIST (REG_EH_REGION,
3468 XEXP (note, 0),
3469 REG_NOTES (insn));
3470 insn = PREV_INSN (insn);
3472 break;
3474 case REG_NORETURN:
3475 case REG_SETJMP:
3476 case REG_ALWAYS_RETURN:
3477 insn = insn_last;
3478 while (insn != NULL_RTX)
3480 if (GET_CODE (insn) == CALL_INSN)
3481 REG_NOTES (insn)
3482 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3483 XEXP (note, 0),
3484 REG_NOTES (insn));
3485 insn = PREV_INSN (insn);
3487 break;
3489 case REG_NON_LOCAL_GOTO:
3490 insn = insn_last;
3491 while (insn != NULL_RTX)
3493 if (GET_CODE (insn) == JUMP_INSN)
3494 REG_NOTES (insn)
3495 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3496 XEXP (note, 0),
3497 REG_NOTES (insn));
3498 insn = PREV_INSN (insn);
3500 break;
3502 default:
3503 break;
  /* If there are LABELS inside the split insns, increment the
     usage count so we don't delete the label.  */
3509 if (GET_CODE (trial) == INSN)
3511 insn = insn_last;
3512 while (insn != NULL_RTX)
3514 if (GET_CODE (insn) == INSN)
3515 mark_label_nuses (PATTERN (insn));
3517 insn = PREV_INSN (insn);
3521 tem = emit_insn_after_scope (seq, trial, INSN_SCOPE (trial));
3523 delete_insn (trial);
3524 if (has_barrier)
3525 emit_barrier_after (tem);
3527 /* Recursively call try_split for each new insn created; by the
3528 time control returns here that insn will be fully split, so
3529 set LAST and continue from the insn after the one returned.
3530 We can't use next_active_insn here since AFTER may be a note.
     Ignore deleted insns, which can occur if not optimizing.  */
3532 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3533 if (! INSN_DELETED_P (tem) && INSN_P (tem))
3534 tem = try_split (PATTERN (tem), tem, 1);
3536 /* Return either the first or the last insn, depending on which was
3537 requested. */
3538 return last
3539 ? (after ? PREV_INSN (after) : last_insn)
3540 : NEXT_INSN (before);
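
/* Usage sketch (illustrative): after machine-dependent passes have
   produced an insn, a caller can ask for it to be broken into simpler
   insns:

	insn = try_split (PATTERN (insn), insn, 1);

   If none of the target's define_split patterns apply, TRIAL comes
   back unchanged.  */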
/* Make and return an INSN rtx, initializing all its slots.
   Store PATTERN in the pattern slots.  */

rtx
make_insn_raw (pattern)
     rtx pattern;
{
  rtx insn;

  insn = rtx_alloc (INSN);

  INSN_UID (insn) = cur_insn_uid++;
  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  LOG_LINKS (insn) = NULL;
  REG_NOTES (insn) = NULL;
  INSN_SCOPE (insn) = NULL;
  BLOCK_FOR_INSN (insn) = NULL;

#ifdef ENABLE_RTL_CHECKING
  if (insn
      && INSN_P (insn)
      && (returnjump_p (insn)
          || (GET_CODE (insn) == SET
              && SET_DEST (insn) == pc_rtx)))
    {
      warning ("ICE: emit_insn used where emit_jump_insn needed:\n");
      debug_rtx (insn);
    }
#endif

  return insn;
}
/* Like `make_insn_raw' but make a JUMP_INSN instead of an insn.  */

static rtx
make_jump_insn_raw (pattern)
     rtx pattern;
{
  rtx insn;

  insn = rtx_alloc (JUMP_INSN);
  INSN_UID (insn) = cur_insn_uid++;

  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  LOG_LINKS (insn) = NULL;
  REG_NOTES (insn) = NULL;
  JUMP_LABEL (insn) = NULL;
  INSN_SCOPE (insn) = NULL;
  BLOCK_FOR_INSN (insn) = NULL;

  return insn;
}

/* Like `make_insn_raw' but make a CALL_INSN instead of an insn.  */

static rtx
make_call_insn_raw (pattern)
     rtx pattern;
{
  rtx insn;

  insn = rtx_alloc (CALL_INSN);
  INSN_UID (insn) = cur_insn_uid++;

  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  LOG_LINKS (insn) = NULL;
  REG_NOTES (insn) = NULL;
  CALL_INSN_FUNCTION_USAGE (insn) = NULL;
  INSN_SCOPE (insn) = NULL;
  BLOCK_FOR_INSN (insn) = NULL;

  return insn;
}
/* Add INSN to the end of the doubly-linked list.
   INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE.  */

void
add_insn (insn)
     rtx insn;
{
  PREV_INSN (insn) = last_insn;
  NEXT_INSN (insn) = 0;

  if (NULL != last_insn)
    NEXT_INSN (last_insn) = insn;

  if (NULL == first_insn)
    first_insn = insn;

  last_insn = insn;
}
3640 /* Add INSN into the doubly-linked list after insn AFTER. This and
3641 the next should be the only functions called to insert an insn once
3642 delay slots have been filled since only they know how to update a
3643 SEQUENCE. */
3645 void
3646 add_insn_after (insn, after)
3647 rtx insn, after;
3649 rtx next = NEXT_INSN (after);
3650 basic_block bb;
3652 if (optimize && INSN_DELETED_P (after))
3653 abort ();
3655 NEXT_INSN (insn) = next;
3656 PREV_INSN (insn) = after;
3658 if (next)
3660 PREV_INSN (next) = insn;
3661 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
3662 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
3664 else if (last_insn == after)
3665 last_insn = insn;
3666 else
3668 struct sequence_stack *stack = seq_stack;
3669 /* Scan all pending sequences too. */
3670 for (; stack; stack = stack->next)
3671 if (after == stack->last)
3673 stack->last = insn;
3674 break;
3677 if (stack == 0)
3678 abort ();
3681 if (GET_CODE (after) != BARRIER
3682 && GET_CODE (insn) != BARRIER
3683 && (bb = BLOCK_FOR_INSN (after)))
3685 set_block_for_insn (insn, bb);
3686 if (INSN_P (insn))
3687 bb->flags |= BB_DIRTY;
3688 /* Should not happen as first in the BB is always
3689 either NOTE or LABEL. */
3690 if (bb->end == after
3691 /* Avoid clobbering of structure when creating new BB. */
3692 && GET_CODE (insn) != BARRIER
3693 && (GET_CODE (insn) != NOTE
3694 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
3695 bb->end = insn;
3698 NEXT_INSN (after) = insn;
3699 if (GET_CODE (after) == INSN && GET_CODE (PATTERN (after)) == SEQUENCE)
3701 rtx sequence = PATTERN (after);
3702 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3706 /* Add INSN into the doubly-linked list before insn BEFORE. This and
3707 the previous should be the only functions called to insert an insn once
3708 delay slots have been filled since only they know how to update a
3709 SEQUENCE. */
3711 void
3712 add_insn_before (insn, before)
3713 rtx insn, before;
3715 rtx prev = PREV_INSN (before);
3716 basic_block bb;
3718 if (optimize && INSN_DELETED_P (before))
3719 abort ();
3721 PREV_INSN (insn) = prev;
3722 NEXT_INSN (insn) = before;
3724 if (prev)
3726 NEXT_INSN (prev) = insn;
3727 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
3729 rtx sequence = PATTERN (prev);
3730 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3733 else if (first_insn == before)
3734 first_insn = insn;
3735 else
3737 struct sequence_stack *stack = seq_stack;
3738 /* Scan all pending sequences too. */
3739 for (; stack; stack = stack->next)
3740 if (before == stack->first)
3742 stack->first = insn;
3743 break;
3746 if (stack == 0)
3747 abort ();
3750 if (GET_CODE (before) != BARRIER
3751 && GET_CODE (insn) != BARRIER
3752 && (bb = BLOCK_FOR_INSN (before)))
3754 set_block_for_insn (insn, bb);
3755 if (INSN_P (insn))
3756 bb->flags |= BB_DIRTY;
      /* Should not happen as first in the BB is always
         either NOTE or LABEL.  */
3759 if (bb->head == insn
3760 /* Avoid clobbering of structure when creating new BB. */
3761 && GET_CODE (insn) != BARRIER
3762 && (GET_CODE (insn) != NOTE
3763 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
3764 abort ();
3767 PREV_INSN (before) = insn;
3768 if (GET_CODE (before) == INSN && GET_CODE (PATTERN (before)) == SEQUENCE)
3769 PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
3772 /* Remove an insn from its doubly-linked list. This function knows how
3773 to handle sequences. */
3774 void
3775 remove_insn (insn)
3776 rtx insn;
3778 rtx next = NEXT_INSN (insn);
3779 rtx prev = PREV_INSN (insn);
3780 basic_block bb;
3782 if (prev)
3784 NEXT_INSN (prev) = next;
3785 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
3787 rtx sequence = PATTERN (prev);
3788 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
3791 else if (first_insn == insn)
3792 first_insn = next;
3793 else
3795 struct sequence_stack *stack = seq_stack;
3796 /* Scan all pending sequences too. */
3797 for (; stack; stack = stack->next)
3798 if (insn == stack->first)
3800 stack->first = next;
3801 break;
3804 if (stack == 0)
3805 abort ();
3808 if (next)
3810 PREV_INSN (next) = prev;
3811 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
3812 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
3814 else if (last_insn == insn)
3815 last_insn = prev;
3816 else
3818 struct sequence_stack *stack = seq_stack;
3819 /* Scan all pending sequences too. */
3820 for (; stack; stack = stack->next)
3821 if (insn == stack->last)
3823 stack->last = prev;
3824 break;
3827 if (stack == 0)
3828 abort ();
3830 if (GET_CODE (insn) != BARRIER
3831 && (bb = BLOCK_FOR_INSN (insn)))
3833 if (INSN_P (insn))
3834 bb->flags |= BB_DIRTY;
3835 if (bb->head == insn)
3837 /* Never ever delete the basic block note without deleting whole
3838 basic block. */
3839 if (GET_CODE (insn) == NOTE)
3840 abort ();
3841 bb->head = next;
3843 if (bb->end == insn)
3844 bb->end = prev;
/* Delete all insns made since FROM.
   FROM becomes the new last instruction.  */

void
delete_insns_since (from)
     rtx from;
{
  if (from == 0)
    first_insn = 0;
  else
    NEXT_INSN (from) = 0;
  last_insn = from;
}
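
/* Usage sketch (illustrative; the condition and pattern are
   hypothetical): expanders often emit tentatively and roll back on
   failure:

	rtx last = get_last_insn ();
	emit_insn (pattern);
	if (failed)
	  delete_insns_since (last);

   After the rollback, LAST is again the last insn in the chain.  */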
3862 /* This function is deprecated, please use sequences instead.
3864 Move a consecutive bunch of insns to a different place in the chain.
3865 The insns to be moved are those between FROM and TO.
3866 They are moved to a new position after the insn AFTER.
3867 AFTER must not be FROM or TO or any insn in between.
3869 This function does not know about SEQUENCEs and hence should not be
3870 called after delay-slot filling has been done. */
3872 void
3873 reorder_insns_nobb (from, to, after)
3874 rtx from, to, after;
3876 /* Splice this bunch out of where it is now. */
3877 if (PREV_INSN (from))
3878 NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
3879 if (NEXT_INSN (to))
3880 PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
3881 if (last_insn == to)
3882 last_insn = PREV_INSN (from);
3883 if (first_insn == from)
3884 first_insn = NEXT_INSN (to);
3886 /* Make the new neighbors point to it and it to them. */
3887 if (NEXT_INSN (after))
3888 PREV_INSN (NEXT_INSN (after)) = to;
3890 NEXT_INSN (to) = NEXT_INSN (after);
3891 PREV_INSN (from) = after;
3892 NEXT_INSN (after) = from;
3893 if (after == last_insn)
3894 last_insn = to;
3897 /* Same as function above, but take care to update BB boundaries. */
3898 void
3899 reorder_insns (from, to, after)
3900 rtx from, to, after;
3902 rtx prev = PREV_INSN (from);
3903 basic_block bb, bb2;
3905 reorder_insns_nobb (from, to, after);
3907 if (GET_CODE (after) != BARRIER
3908 && (bb = BLOCK_FOR_INSN (after)))
3910 rtx x;
3911 bb->flags |= BB_DIRTY;
3913 if (GET_CODE (from) != BARRIER
3914 && (bb2 = BLOCK_FOR_INSN (from)))
3916 if (bb2->end == to)
3917 bb2->end = prev;
3918 bb2->flags |= BB_DIRTY;
3921 if (bb->end == after)
3922 bb->end = to;
3924 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
3925 set_block_for_insn (x, bb);
/* Return the line note insn preceding INSN.  */

static rtx
find_line_note (insn)
     rtx insn;
{
  if (no_line_numbers)
    return 0;

  for (; insn; insn = PREV_INSN (insn))
    if (GET_CODE (insn) == NOTE
        && NOTE_LINE_NUMBER (insn) >= 0)
      break;

  return insn;
}
3946 /* Like reorder_insns, but inserts line notes to preserve the line numbers
3947 of the moved insns when debugging. This may insert a note between AFTER
3948 and FROM, and another one after TO. */
3950 void
3951 reorder_insns_with_line_notes (from, to, after)
3952 rtx from, to, after;
3954 rtx from_line = find_line_note (from);
3955 rtx after_line = find_line_note (after);
3957 reorder_insns (from, to, after);
3959 if (from_line == after_line)
3960 return;
3962 if (from_line)
3963 emit_line_note_after (NOTE_SOURCE_FILE (from_line),
3964 NOTE_LINE_NUMBER (from_line),
3965 after);
3966 if (after_line)
3967 emit_line_note_after (NOTE_SOURCE_FILE (after_line),
3968 NOTE_LINE_NUMBER (after_line),
3969 to);
3972 /* Remove unnecessary notes from the instruction stream. */
3974 void
3975 remove_unnecessary_notes ()
3977 rtx block_stack = NULL_RTX;
3978 rtx eh_stack = NULL_RTX;
3979 rtx insn;
3980 rtx next;
3981 rtx tmp;
3983 /* We must not remove the first instruction in the function because
3984 the compiler depends on the first instruction being a note. */
3985 for (insn = NEXT_INSN (get_insns ()); insn; insn = next)
3987 /* Remember what's next. */
3988 next = NEXT_INSN (insn);
3990 /* We're only interested in notes. */
3991 if (GET_CODE (insn) != NOTE)
3992 continue;
3994 switch (NOTE_LINE_NUMBER (insn))
3996 case NOTE_INSN_DELETED:
3997 case NOTE_INSN_LOOP_END_TOP_COND:
3998 remove_insn (insn);
3999 break;
4001 case NOTE_INSN_EH_REGION_BEG:
4002 eh_stack = alloc_INSN_LIST (insn, eh_stack);
4003 break;
4005 case NOTE_INSN_EH_REGION_END:
4006 /* Too many end notes. */
4007 if (eh_stack == NULL_RTX)
4008 abort ();
4009 /* Mismatched nesting. */
4010 if (NOTE_EH_HANDLER (XEXP (eh_stack, 0)) != NOTE_EH_HANDLER (insn))
4011 abort ();
4012 tmp = eh_stack;
4013 eh_stack = XEXP (eh_stack, 1);
4014 free_INSN_LIST_node (tmp);
4015 break;
4017 case NOTE_INSN_BLOCK_BEG:
4018 /* By now, all notes indicating lexical blocks should have
4019 NOTE_BLOCK filled in. */
4020 if (NOTE_BLOCK (insn) == NULL_TREE)
4021 abort ();
4022 block_stack = alloc_INSN_LIST (insn, block_stack);
4023 break;
4025 case NOTE_INSN_BLOCK_END:
4026 /* Too many end notes. */
4027 if (block_stack == NULL_RTX)
4028 abort ();
4029 /* Mismatched nesting. */
4030 if (NOTE_BLOCK (XEXP (block_stack, 0)) != NOTE_BLOCK (insn))
4031 abort ();
4032 tmp = block_stack;
4033 block_stack = XEXP (block_stack, 1);
4034 free_INSN_LIST_node (tmp);
4036 /* Scan back to see if there are any non-note instructions
4037 between INSN and the beginning of this block. If not,
4038 then there is no PC range in the generated code that will
4039 actually be in this block, so there's no point in
4040 remembering the existence of the block. */
4041 for (tmp = PREV_INSN (insn); tmp; tmp = PREV_INSN (tmp))
4043 /* This block contains a real instruction. Note that we
4044 don't include labels; if the only thing in the block
4045 is a label, then there are still no PC values that
4046 lie within the block. */
4047 if (INSN_P (tmp))
4048 break;
4050 /* We're only interested in NOTEs. */
4051 if (GET_CODE (tmp) != NOTE)
4052 continue;
4054 if (NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BLOCK_BEG)
4056 /* We just verified that this BLOCK matches us with
4057 the block_stack check above. Never delete the
4058 BLOCK for the outermost scope of the function; we
4059 can refer to names from that scope even if the
4060 block notes are messed up. */
4061 if (! is_body_block (NOTE_BLOCK (insn))
4062 && (*debug_hooks->ignore_block) (NOTE_BLOCK (insn)))
4064 remove_insn (tmp);
4065 remove_insn (insn);
4067 break;
4069 else if (NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BLOCK_END)
4070 /* There's a nested block. We need to leave the
4071 current block in place since otherwise the debugger
4072 wouldn't be able to show symbols from our block in
4073 the nested block. */
4074 break;
4079 /* Too many begin notes. */
4080 if (block_stack || eh_stack)
4081 abort ();
4085 /* Emit insn(s) of given code and pattern
4086 at a specified place within the doubly-linked list.
4088 All of the emit_foo global entry points accept an object
4089 X which is either an insn list or a PATTERN of a single
4090 instruction.
4092 There are thus a few canonical ways to generate code and
4093 emit it at a specific place in the instruction stream. For
4094 example, consider the instruction named SPOT and the fact that
4095 we would like to emit some instructions before SPOT. We might
4096 do it like this:
4098 start_sequence ();
4099 ... emit the new instructions ...
4100 insns_head = get_insns ();
4101 end_sequence ();
4103 emit_insn_before (insns_head, SPOT);
4105 It used to be common to generate SEQUENCE rtl instead, but that
4106 is a relic of the past which no longer occurs. The reason is that
4107 SEQUENCE rtl results in much fragmented RTL memory since the SEQUENCE
4108 generated would almost certainly die right after it was created. */
4110 /* Make X be output before the instruction BEFORE. */
4113 emit_insn_before (x, before)
4114 rtx x, before;
4116 rtx last = before;
4117 rtx insn;
4119 #ifdef ENABLE_RTL_CHECKING
4120 if (before == NULL_RTX)
4121 abort ();
4122 #endif
4124 if (x == NULL_RTX)
4125 return last;
4127 switch (GET_CODE (x))
4129 case INSN:
4130 case JUMP_INSN:
4131 case CALL_INSN:
4132 case CODE_LABEL:
4133 case BARRIER:
4134 case NOTE:
4135 insn = x;
4136 while (insn)
4138 rtx next = NEXT_INSN (insn);
4139 add_insn_before (insn, before);
4140 last = insn;
4141 insn = next;
4143 break;
4145 #ifdef ENABLE_RTL_CHECKING
4146 case SEQUENCE:
4147 abort ();
4148 break;
4149 #endif
4151 default:
4152 last = make_insn_raw (x);
4153 add_insn_before (last, before);
4154 break;
4157 return last;
4160 /* Make an instruction with body X and code JUMP_INSN
4161 and output it before the instruction BEFORE. */
4164 emit_jump_insn_before (x, before)
4165 rtx x, before;
4167 rtx insn, last = NULL_RTX;
4169 #ifdef ENABLE_RTL_CHECKING
4170 if (before == NULL_RTX)
4171 abort ();
4172 #endif
4174 switch (GET_CODE (x))
4176 case INSN:
4177 case JUMP_INSN:
4178 case CALL_INSN:
4179 case CODE_LABEL:
4180 case BARRIER:
4181 case NOTE:
4182 insn = x;
4183 while (insn)
4185 rtx next = NEXT_INSN (insn);
4186 add_insn_before (insn, before);
4187 last = insn;
4188 insn = next;
4190 break;
4192 #ifdef ENABLE_RTL_CHECKING
4193 case SEQUENCE:
4194 abort ();
4195 break;
4196 #endif
4198 default:
4199 last = make_jump_insn_raw (x);
4200 add_insn_before (last, before);
4201 break;
4204 return last;
4207 /* Make an instruction with body X and code CALL_INSN
4208 and output it before the instruction BEFORE. */
4211 emit_call_insn_before (x, before)
4212 rtx x, before;
4214 rtx last = NULL_RTX, insn;
4216 #ifdef ENABLE_RTL_CHECKING
4217 if (before == NULL_RTX)
4218 abort ();
4219 #endif
4221 switch (GET_CODE (x))
4223 case INSN:
4224 case JUMP_INSN:
4225 case CALL_INSN:
4226 case CODE_LABEL:
4227 case BARRIER:
4228 case NOTE:
4229 insn = x;
4230 while (insn)
4232 rtx next = NEXT_INSN (insn);
4233 add_insn_before (insn, before);
4234 last = insn;
4235 insn = next;
4237 break;
4239 #ifdef ENABLE_RTL_CHECKING
4240 case SEQUENCE:
4241 abort ();
4242 break;
4243 #endif
4245 default:
4246 last = make_call_insn_raw (x);
4247 add_insn_before (last, before);
4248 break;
4251 return last;
4254 /* Make an insn of code BARRIER
4255 and output it before the insn BEFORE. */
4258 emit_barrier_before (before)
4259 rtx before;
4261 rtx insn = rtx_alloc (BARRIER);
4263 INSN_UID (insn) = cur_insn_uid++;
4265 add_insn_before (insn, before);
4266 return insn;
4269 /* Emit the label LABEL before the insn BEFORE. */
4272 emit_label_before (label, before)
4273 rtx label, before;
4275 /* This can be called twice for the same label as a result of the
4276 confusion that follows a syntax error! So make it harmless. */
4277 if (INSN_UID (label) == 0)
4279 INSN_UID (label) = cur_insn_uid++;
4280 add_insn_before (label, before);
4283 return label;
4286 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4289 emit_note_before (subtype, before)
4290 int subtype;
4291 rtx before;
4293 rtx note = rtx_alloc (NOTE);
4294 INSN_UID (note) = cur_insn_uid++;
4295 NOTE_SOURCE_FILE (note) = 0;
4296 NOTE_LINE_NUMBER (note) = subtype;
4297 BLOCK_FOR_INSN (note) = NULL;
4299 add_insn_before (note, before);
4300 return note;
4303 /* Helper for emit_insn_after, handles lists of instructions
4304 efficiently. */
4306 static rtx emit_insn_after_1 PARAMS ((rtx, rtx));
4308 static rtx
4309 emit_insn_after_1 (first, after)
4310 rtx first, after;
4312 rtx last;
4313 rtx after_after;
4314 basic_block bb;
4316 if (GET_CODE (after) != BARRIER
4317 && (bb = BLOCK_FOR_INSN (after)))
4319 bb->flags |= BB_DIRTY;
4320 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4321 if (GET_CODE (last) != BARRIER)
4322 set_block_for_insn (last, bb);
4323 if (GET_CODE (last) != BARRIER)
4324 set_block_for_insn (last, bb);
4325 if (bb->end == after)
4326 bb->end = last;
4328 else
4329 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4330 continue;
4332 after_after = NEXT_INSN (after);
4334 NEXT_INSN (after) = first;
4335 PREV_INSN (first) = after;
4336 NEXT_INSN (last) = after_after;
4337 if (after_after)
4338 PREV_INSN (after_after) = last;
4340 if (after == last_insn)
4341 last_insn = last;
4342 return last;
4345 /* Make X be output after the insn AFTER. */
4348 emit_insn_after (x, after)
4349 rtx x, after;
4351 rtx last = after;
4353 #ifdef ENABLE_RTL_CHECKING
4354 if (after == NULL_RTX)
4355 abort ();
4356 #endif
4358 if (x == NULL_RTX)
4359 return last;
4361 switch (GET_CODE (x))
4363 case INSN:
4364 case JUMP_INSN:
4365 case CALL_INSN:
4366 case CODE_LABEL:
4367 case BARRIER:
4368 case NOTE:
4369 last = emit_insn_after_1 (x, after);
4370 break;
4372 #ifdef ENABLE_RTL_CHECKING
4373 case SEQUENCE:
4374 abort ();
4375 break;
4376 #endif
4378 default:
4379 last = make_insn_raw (x);
4380 add_insn_after (last, after);
4381 break;
4384 return last;
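/* A minimal usage sketch (illustrative; AFTER_SPOT is a placeholder
   for an existing insn), mirroring the emit_insn_before idiom
   documented above:

       start_sequence ();
       ... emit the new instructions ...
       insns_head = get_insns ();
       end_sequence ();
       emit_insn_after (insns_head, AFTER_SPOT);

   emit_insn_after_1 also keeps the basic-block data current: each
   non-BARRIER insn spliced in gets BLOCK_FOR_INSN set, and bb->end
   is advanced when AFTER_SPOT used to end its block.  */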
4387 /* Similar to emit_insn_after, except that line notes are to be inserted so
4388 as to act as if this insn were at FROM. */
4390 void
4391 emit_insn_after_with_line_notes (x, after, from)
4392 rtx x, after, from;
4394 rtx from_line = find_line_note (from);
4395 rtx after_line = find_line_note (after);
4396 rtx insn = emit_insn_after (x, after);
4398 if (from_line)
4399 emit_line_note_after (NOTE_SOURCE_FILE (from_line),
4400 NOTE_LINE_NUMBER (from_line),
4401 after);
4403 if (after_line)
4404 emit_line_note_after (NOTE_SOURCE_FILE (after_line),
4405 NOTE_LINE_NUMBER (after_line),
4406 insn);
4409 /* Make an insn of code JUMP_INSN with body X
4410 and output it after the insn AFTER. */
4413 emit_jump_insn_after (x, after)
4414 rtx x, after;
4416 rtx last;
4418 #ifdef ENABLE_RTL_CHECKING
4419 if (after == NULL_RTX)
4420 abort ();
4421 #endif
4423 switch (GET_CODE (x))
4425 case INSN:
4426 case JUMP_INSN:
4427 case CALL_INSN:
4428 case CODE_LABEL:
4429 case BARRIER:
4430 case NOTE:
4431 last = emit_insn_after_1 (x, after);
4432 break;
4434 #ifdef ENABLE_RTL_CHECKING
4435 case SEQUENCE:
4436 abort ();
4437 break;
4438 #endif
4440 default:
4441 last = make_jump_insn_raw (x);
4442 add_insn_after (last, after);
4443 break;
4446 return last;
4449 /* Make an instruction with body X and code CALL_INSN
4450 and output it after the instruction AFTER. */
4453 emit_call_insn_after (x, after)
4454 rtx x, after;
4456 rtx last;
4458 #ifdef ENABLE_RTL_CHECKING
4459 if (after == NULL_RTX)
4460 abort ();
4461 #endif
4463 switch (GET_CODE (x))
4465 case INSN:
4466 case JUMP_INSN:
4467 case CALL_INSN:
4468 case CODE_LABEL:
4469 case BARRIER:
4470 case NOTE:
4471 last = emit_insn_after_1 (x, after);
4472 break;
4474 #ifdef ENABLE_RTL_CHECKING
4475 case SEQUENCE:
4476 abort ();
4477 break;
4478 #endif
4480 default:
4481 last = make_call_insn_raw (x);
4482 add_insn_after (last, after);
4483 break;
4486 return last;
4489 /* Make an insn of code BARRIER
4490 and output it after the insn AFTER. */
4493 emit_barrier_after (after)
4494 rtx after;
4496 rtx insn = rtx_alloc (BARRIER);
4498 INSN_UID (insn) = cur_insn_uid++;
4500 add_insn_after (insn, after);
4501 return insn;
4504 /* Emit the label LABEL after the insn AFTER. */
4507 emit_label_after (label, after)
4508 rtx label, after;
4510 /* This can be called twice for the same label
4511 as a result of the confusion that follows a syntax error!
4512 So make it harmless. */
4513 if (INSN_UID (label) == 0)
4515 INSN_UID (label) = cur_insn_uid++;
4516 add_insn_after (label, after);
4519 return label;
4522 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
4525 emit_note_after (subtype, after)
4526 int subtype;
4527 rtx after;
4529 rtx note = rtx_alloc (NOTE);
4530 INSN_UID (note) = cur_insn_uid++;
4531 NOTE_SOURCE_FILE (note) = 0;
4532 NOTE_LINE_NUMBER (note) = subtype;
4533 BLOCK_FOR_INSN (note) = NULL;
4534 add_insn_after (note, after);
4535 return note;
4538 /* Emit a line note for FILE and LINE after the insn AFTER. */
4541 emit_line_note_after (file, line, after)
4542 const char *file;
4543 int line;
4544 rtx after;
4546 rtx note;
4548 if (no_line_numbers && line > 0)
4550 cur_insn_uid++;
4551 return 0;
4554 note = rtx_alloc (NOTE);
4555 INSN_UID (note) = cur_insn_uid++;
4556 NOTE_SOURCE_FILE (note) = file;
4557 NOTE_LINE_NUMBER (note) = line;
4558 BLOCK_FOR_INSN (note) = NULL;
4559 add_insn_after (note, after);
4560 return note;
4563 /* Like emit_insn_after, but set INSN_SCOPE according to SCOPE. */
4565 emit_insn_after_scope (pattern, after, scope)
4566 rtx pattern, after;
4567 tree scope;
4569 rtx last = emit_insn_after (pattern, after);
4571 after = NEXT_INSN (after);
4572 while (1)
4574 if (active_insn_p (after))
4575 INSN_SCOPE (after) = scope;
4576 if (after == last)
4577 break;
4578 after = NEXT_INSN (after);
4580 return last;
4583 /* Like emit_jump_insn_after, but set INSN_SCOPE according to SCOPE. */
4585 emit_jump_insn_after_scope (pattern, after, scope)
4586 rtx pattern, after;
4587 tree scope;
4589 rtx last = emit_jump_insn_after (pattern, after);
4591 after = NEXT_INSN (after);
4592 while (1)
4594 if (active_insn_p (after))
4595 INSN_SCOPE (after) = scope;
4596 if (after == last)
4597 break;
4598 after = NEXT_INSN (after);
4600 return last;
4603 /* Like emit_call_insn_after, but set INSN_SCOPE according to SCOPE. */
4605 emit_call_insn_after_scope (pattern, after, scope)
4606 rtx pattern, after;
4607 tree scope;
4609 rtx last = emit_call_insn_after (pattern, after);
4611 after = NEXT_INSN (after);
4612 while (1)
4614 if (active_insn_p (after))
4615 INSN_SCOPE (after) = scope;
4616 if (after == last)
4617 break;
4618 after = NEXT_INSN (after);
4620 return last;
4623 /* Like emit_insn_before, but set INSN_SCOPE according to SCOPE. */
4625 emit_insn_before_scope (pattern, before, scope)
4626 rtx pattern, before;
4627 tree scope;
4629 rtx first = PREV_INSN (before);
4630 rtx last = emit_insn_before (pattern, before);
4632 first = NEXT_INSN (first);
4633 while (1)
4635 if (active_insn_p (first))
4636 INSN_SCOPE (first) = scope;
4637 if (first == last)
4638 break;
4639 first = NEXT_INSN (first);
4641 return last;
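/* A minimal usage sketch for the *_scope variants above
   (illustrative; PAT, AFTER and BLOCK are placeholders, BLOCK being
   the tree node for some lexical scope):

       rtx last = emit_insn_after_scope (pat, after, block);

   Every active insn emitted between AFTER and LAST gets INSN_SCOPE
   set to BLOCK, so later debug output attributes the new code to
   that scope.  */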
4644 /* Take X and emit it at the end of the doubly-linked
4645 INSN list.
4647 Returns the last insn emitted. */
4650 emit_insn (x)
4651 rtx x;
4653 rtx last = last_insn;
4654 rtx insn;
4656 if (x == NULL_RTX)
4657 return last;
4659 switch (GET_CODE (x))
4661 case INSN:
4662 case JUMP_INSN:
4663 case CALL_INSN:
4664 case CODE_LABEL:
4665 case BARRIER:
4666 case NOTE:
4667 insn = x;
4668 while (insn)
4670 rtx next = NEXT_INSN (insn);
4671 add_insn (insn);
4672 last = insn;
4673 insn = next;
4675 break;
4677 #ifdef ENABLE_RTL_CHECKING
4678 case SEQUENCE:
4679 abort ();
4680 break;
4681 #endif
4683 default:
4684 last = make_insn_raw (x);
4685 add_insn (last);
4686 break;
4689 return last;
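/* A minimal usage sketch (illustrative; gen_addsi3 exists only on
   targets whose machine description provides an "addsi3" pattern,
   and DEST/SRC1/SRC2 are placeholder operands):

       emit_insn (gen_addsi3 (dest, src1, src2));

   The expander hands back either a bare pattern or an insn list;
   the switch above accepts both, wrapping a bare pattern in a
   fresh INSN via make_insn_raw.  */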
4692 /* Make an insn of code JUMP_INSN with pattern X
4693 and add it to the end of the doubly-linked list. */
4696 emit_jump_insn (x)
4697 rtx x;
4699 rtx last = NULL_RTX, insn;
4701 switch (GET_CODE (x))
4703 case INSN:
4704 case JUMP_INSN:
4705 case CALL_INSN:
4706 case CODE_LABEL:
4707 case BARRIER:
4708 case NOTE:
4709 insn = x;
4710 while (insn)
4712 rtx next = NEXT_INSN (insn);
4713 add_insn (insn);
4714 last = insn;
4715 insn = next;
4717 break;
4719 #ifdef ENABLE_RTL_CHECKING
4720 case SEQUENCE:
4721 abort ();
4722 break;
4723 #endif
4725 default:
4726 last = make_jump_insn_raw (x);
4727 add_insn (last);
4728 break;
4731 return last;
4734 /* Make an insn of code CALL_INSN with pattern X
4735 and add it to the end of the doubly-linked list. */
4738 emit_call_insn (x)
4739 rtx x;
4741 rtx insn;
4743 switch (GET_CODE (x))
4745 case INSN:
4746 case JUMP_INSN:
4747 case CALL_INSN:
4748 case CODE_LABEL:
4749 case BARRIER:
4750 case NOTE:
4751 insn = emit_insn (x);
4752 break;
4754 #ifdef ENABLE_RTL_CHECKING
4755 case SEQUENCE:
4756 abort ();
4757 break;
4758 #endif
4760 default:
4761 insn = make_call_insn_raw (x);
4762 add_insn (insn);
4763 break;
4766 return insn;
4769 /* Add the label LABEL to the end of the doubly-linked list. */
4772 emit_label (label)
4773 rtx label;
4775 /* This can be called twice for the same label
4776 as a result of the confusion that follows a syntax error!
4777 So make it harmless. */
4778 if (INSN_UID (label) == 0)
4780 INSN_UID (label) = cur_insn_uid++;
4781 add_insn (label);
4783 return label;
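/* A minimal usage sketch (illustrative): a label is created with
   gen_label_rtx and becomes part of the chain only once emitted:

       rtx label = gen_label_rtx ();
       ...
       emit_label (label);

   Emitting the same label twice is tolerated, per the comment
   above, by checking whether INSN_UID was already assigned.  */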
4786 /* Make an insn of code BARRIER
4787 and add it to the end of the doubly-linked list. */
4790 emit_barrier ()
4792 rtx barrier = rtx_alloc (BARRIER);
4793 INSN_UID (barrier) = cur_insn_uid++;
4794 add_insn (barrier);
4795 return barrier;
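/* A minimal usage sketch (illustrative): an unconditional jump is
   normally followed by a barrier, since control cannot reach the
   code after it:

       emit_jump_insn (gen_jump (label));
       emit_barrier ();

   gen_jump is assumed here as the expander for the standard "jump"
   named pattern; see also `emit' below, which adds the barrier
   automatically for unconditional jumps.  */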
4798 /* Make an insn of code NOTE
4799 with data-fields specified by FILE and LINE
4800 and add it to the end of the doubly-linked list,
4801 but only if line-numbers are desired for debugging info. */
4804 emit_line_note (file, line)
4805 const char *file;
4806 int line;
4808 set_file_and_line_for_stmt (file, line);
4810 #if 0
4811 if (no_line_numbers)
4812 return 0;
4813 #endif
4815 return emit_note (file, line);
4818 /* Make an insn of code NOTE
4819 with data-fields specified by FILE and LINE
4820 and add it to the end of the doubly-linked list.
4821 If it is a line-number NOTE, omit it if it matches the previous one. */
4824 emit_note (file, line)
4825 const char *file;
4826 int line;
4828 rtx note;
4830 if (line > 0)
4832 if (file && last_filename && !strcmp (file, last_filename)
4833 && line == last_linenum)
4834 return 0;
4835 last_filename = file;
4836 last_linenum = line;
4839 if (no_line_numbers && line > 0)
4841 cur_insn_uid++;
4842 return 0;
4845 note = rtx_alloc (NOTE);
4846 INSN_UID (note) = cur_insn_uid++;
4847 NOTE_SOURCE_FILE (note) = file;
4848 NOTE_LINE_NUMBER (note) = line;
4849 BLOCK_FOR_INSN (note) = NULL;
4850 add_insn (note);
4851 return note;
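/* Minimal usage sketches (illustrative).  A positive LINE emits a
   line-number note, subject to the duplicate check above; a
   NOTE_INSN_* code (these are negative) emits a marker note, for
   which FILE is conventionally passed as NULL:

       emit_note (input_filename, lineno);
       emit_note (NULL, NOTE_INSN_DELETED);

   input_filename and lineno are the compiler's current-location
   globals.  */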
4854 /* Emit a NOTE, and don't omit it even if LINE is the previous note. */
4857 emit_line_note_force (file, line)
4858 const char *file;
4859 int line;
4861 last_linenum = -1;
4862 return emit_line_note (file, line);
4865 /* Cause next statement to emit a line note even if the line number
4866 has not changed. This is used at the beginning of a function. */
4868 void
4869 force_next_line_note ()
4871 last_linenum = -1;
4874 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
4875 note of this type already exists, remove it first. */
4878 set_unique_reg_note (insn, kind, datum)
4879 rtx insn;
4880 enum reg_note kind;
4881 rtx datum;
4883 rtx note = find_reg_note (insn, kind, NULL_RTX);
4885 switch (kind)
4887 case REG_EQUAL:
4888 case REG_EQUIV:
4889 /* Don't add REG_EQUAL/REG_EQUIV notes if the insn
4890 has multiple sets (some callers assume single_set
4891 means the insn only has one set, when in fact it
4892 means the insn only has one *useful* set). */
4893 if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
4895 if (note)
4896 abort ();
4897 return NULL_RTX;
4900 /* Don't add ASM_OPERANDS REG_EQUAL/REG_EQUIV notes.
4901 Such a note serves no useful purpose and breaks eliminate_regs. */
4902 if (GET_CODE (datum) == ASM_OPERANDS)
4903 return NULL_RTX;
4904 break;
4906 default:
4907 break;
4910 if (note)
4912 XEXP (note, 0) = datum;
4913 return note;
4916 REG_NOTES (insn) = gen_rtx_EXPR_LIST (kind, datum, REG_NOTES (insn));
4917 return REG_NOTES (insn);
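/* A minimal usage sketch (illustrative; INSN is assumed to be a
   single-set insn whose result is known to equal 42):

       set_unique_reg_note (insn, REG_EQUAL, GEN_INT (42));

   If INSN already carries a REG_EQUAL note, its datum is replaced
   in place; otherwise a new EXPR_LIST is pushed onto REG_NOTES.  */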
4920 /* Return an indication of which type of insn should have X as a body.
4921 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
4923 enum rtx_code
4924 classify_insn (x)
4925 rtx x;
4927 if (GET_CODE (x) == CODE_LABEL)
4928 return CODE_LABEL;
4929 if (GET_CODE (x) == CALL)
4930 return CALL_INSN;
4931 if (GET_CODE (x) == RETURN)
4932 return JUMP_INSN;
4933 if (GET_CODE (x) == SET)
4935 if (SET_DEST (x) == pc_rtx)
4936 return JUMP_INSN;
4937 else if (GET_CODE (SET_SRC (x)) == CALL)
4938 return CALL_INSN;
4939 else
4940 return INSN;
4942 if (GET_CODE (x) == PARALLEL)
4944 int j;
4945 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
4946 if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
4947 return CALL_INSN;
4948 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
4949 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
4950 return JUMP_INSN;
4951 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
4952 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
4953 return CALL_INSN;
4955 return INSN;
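/* For example (illustrative), (set (pc) (label_ref L)) classifies as
   JUMP_INSN, (set (reg R) (call ...)) as CALL_INSN, and a plain
   (set (reg R) (const_int 0)) as INSN.  `emit' below relies on this
   classification to pick the matching emit_* entry point.  */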
4958 /* Emit the rtl pattern X as an appropriate kind of insn.
4959 If X is a label, it is simply added into the insn chain. */
4962 emit (x)
4963 rtx x;
4965 enum rtx_code code = classify_insn (x);
4967 if (code == CODE_LABEL)
4968 return emit_label (x);
4969 else if (code == INSN)
4970 return emit_insn (x);
4971 else if (code == JUMP_INSN)
4973 rtx insn = emit_jump_insn (x);
4974 if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
4975 return emit_barrier ();
4976 return insn;
4978 else if (code == CALL_INSN)
4979 return emit_call_insn (x);
4980 else
4981 abort ();
4984 /* Space for free sequence stack entries. */
4985 static GTY ((deletable (""))) struct sequence_stack *free_sequence_stack;
4987 /* Begin emitting insns to a sequence which can be packaged in an
4988 RTL_EXPR. If this sequence will contain something that might cause
4989 the compiler to pop arguments to function calls (because those
4990 pops have previously been deferred; see INHIBIT_DEFER_POP for more
4991 details), use do_pending_stack_adjust before calling this function.
4992 That will ensure that the deferred pops are not accidentally
4993 emitted in the middle of this sequence. */
4995 void
4996 start_sequence ()
4998 struct sequence_stack *tem;
5000 if (free_sequence_stack != NULL)
5002 tem = free_sequence_stack;
5003 free_sequence_stack = tem->next;
5005 else
5006 tem = (struct sequence_stack *) ggc_alloc (sizeof (struct sequence_stack));
5008 tem->next = seq_stack;
5009 tem->first = first_insn;
5010 tem->last = last_insn;
5011 tem->sequence_rtl_expr = seq_rtl_expr;
5013 seq_stack = tem;
5015 first_insn = 0;
5016 last_insn = 0;
5019 /* Similarly, but indicate that this sequence will be placed in T, an
5020 RTL_EXPR. See the documentation for start_sequence for more
5021 information about how to use this function. */
5023 void
5024 start_sequence_for_rtl_expr (t)
5025 tree t;
5027 start_sequence ();
5029 seq_rtl_expr = t;
5032 /* Set up the insn chain starting with FIRST as the current sequence,
5033 saving the previously current one. See the documentation for
5034 start_sequence for more information about how to use this function. */
5036 void
5037 push_to_sequence (first)
5038 rtx first;
5040 rtx last;
5042 start_sequence ();
5044 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last));
5046 first_insn = first;
5047 last_insn = last;
5050 /* Set up the insn chain, from FIRST through LAST, as the current sequence. */
5052 void
5053 push_to_full_sequence (first, last)
5054 rtx first, last;
5056 start_sequence ();
5057 first_insn = first;
5058 last_insn = last;
5059 /* We really should have the end of the insn chain here. */
5060 if (last && NEXT_INSN (last))
5061 abort ();
5064 /* Set up the outer-level insn chain
5065 as the current sequence, saving the previously current one. */
5067 void
5068 push_topmost_sequence ()
5070 struct sequence_stack *stack, *top = NULL;
5072 start_sequence ();
5074 for (stack = seq_stack; stack; stack = stack->next)
5075 top = stack;
5077 first_insn = top->first;
5078 last_insn = top->last;
5079 seq_rtl_expr = top->sequence_rtl_expr;
5082 /* After emitting to the outer-level insn chain, update the outer-level
5083 insn chain, and restore the previous saved state. */
5085 void
5086 pop_topmost_sequence ()
5088 struct sequence_stack *stack, *top = NULL;
5090 for (stack = seq_stack; stack; stack = stack->next)
5091 top = stack;
5093 top->first = first_insn;
5094 top->last = last_insn;
5095 /* ??? Why don't we save seq_rtl_expr here? */
5097 end_sequence ();
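/* A minimal usage sketch (illustrative): to emit at the end of the
   function's outermost chain while a nested sequence is active:

       push_topmost_sequence ();
       emit_insn (pat);
       pop_topmost_sequence ();

   The nested sequence is saved and restored around the pair, so
   emission resumes exactly where it left off.  */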
5100 /* After emitting to a sequence, restore previous saved state.
5102 To get the contents of the sequence just made, you must call
5103 `get_insns' *before* calling here.
5105 If the compiler might have deferred popping arguments while
5106 generating this sequence, and this sequence will not be immediately
5107 inserted into the instruction stream, use do_pending_stack_adjust
5108 before calling get_insns. That will ensure that the deferred
5109 pops are inserted into this sequence, and not into some random
5110 location in the instruction stream. See INHIBIT_DEFER_POP for more
5111 information about deferred popping of arguments. */
5113 void
5114 end_sequence ()
5116 struct sequence_stack *tem = seq_stack;
5118 first_insn = tem->first;
5119 last_insn = tem->last;
5120 seq_rtl_expr = tem->sequence_rtl_expr;
5121 seq_stack = tem->next;
5123 memset (tem, 0, sizeof (*tem));
5124 tem->next = free_sequence_stack;
5125 free_sequence_stack = tem;
5128 /* This works like end_sequence, but records the old sequence in FIRST
5129 and LAST. */
5131 void
5132 end_full_sequence (first, last)
5133 rtx *first, *last;
5135 *first = first_insn;
5136 *last = last_insn;
5137 end_sequence ();
5140 /* Return 1 if currently emitting into a sequence. */
5143 in_sequence_p ()
5145 return seq_stack != 0;
5148 /* Put the various virtual registers into REGNO_REG_RTX. */
5150 void
5151 init_virtual_regs (es)
5152 struct emit_status *es;
5154 rtx *ptr = es->x_regno_reg_rtx;
5155 ptr[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
5156 ptr[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
5157 ptr[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
5158 ptr[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
5159 ptr[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
5163 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
5164 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
5165 static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
5166 static int copy_insn_n_scratches;
5168 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5169 copied an ASM_OPERANDS.
5170 In that case, it is the original input-operand vector. */
5171 static rtvec orig_asm_operands_vector;
5173 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5174 copied an ASM_OPERANDS.
5175 In that case, it is the copied input-operand vector. */
5176 static rtvec copy_asm_operands_vector;
5178 /* Likewise for the constraints vector. */
5179 static rtvec orig_asm_constraints_vector;
5180 static rtvec copy_asm_constraints_vector;
5182 /* Recursively create a new copy of an rtx for copy_insn.
5183 This function differs from copy_rtx in that it handles SCRATCHes and
5184 ASM_OPERANDs properly.
5185 Normally, this function is not used directly; use copy_insn as front end.
5186 However, you could first copy an insn pattern with copy_insn and then use
5187 this function afterwards to properly copy any REG_NOTEs containing
5188 SCRATCHes. */
5191 copy_insn_1 (orig)
5192 rtx orig;
5194 rtx copy;
5195 int i, j;
5196 RTX_CODE code;
5197 const char *format_ptr;
5199 code = GET_CODE (orig);
5201 switch (code)
5203 case REG:
5204 case QUEUED:
5205 case CONST_INT:
5206 case CONST_DOUBLE:
5207 case CONST_VECTOR:
5208 case SYMBOL_REF:
5209 case CODE_LABEL:
5210 case PC:
5211 case CC0:
5212 case ADDRESSOF:
5213 return orig;
5215 case SCRATCH:
5216 for (i = 0; i < copy_insn_n_scratches; i++)
5217 if (copy_insn_scratch_in[i] == orig)
5218 return copy_insn_scratch_out[i];
5219 break;
5221 case CONST:
5222 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
5223 a LABEL_REF, it isn't sharable. */
5224 if (GET_CODE (XEXP (orig, 0)) == PLUS
5225 && GET_CODE (XEXP (XEXP (orig, 0), 0)) == SYMBOL_REF
5226 && GET_CODE (XEXP (XEXP (orig, 0), 1)) == CONST_INT)
5227 return orig;
5228 break;
5230 /* A MEM with a constant address is not sharable. The problem is that
5231 the constant address may need to be reloaded. If the mem is shared,
5232 then reloading one copy of this mem will cause all copies to appear
5233 to have been reloaded. */
5235 default:
5236 break;
5239 copy = rtx_alloc (code);
5241 /* Copy the various flags, and other information. We assume that
5242 all fields need copying, and then clear the fields that should
5243 not be copied. That is the sensible default behavior, and forces
5244 us to explicitly document why we are *not* copying a flag. */
5245 memcpy (copy, orig, sizeof (struct rtx_def) - sizeof (rtunion));
5247 /* We do not copy the USED flag, which is used as a mark bit during
5248 walks over the RTL. */
5249 RTX_FLAG (copy, used) = 0;
5251 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
5252 if (GET_RTX_CLASS (code) == 'i')
5254 RTX_FLAG (copy, jump) = 0;
5255 RTX_FLAG (copy, call) = 0;
5256 RTX_FLAG (copy, frame_related) = 0;
5259 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
5261 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
5263 copy->fld[i] = orig->fld[i];
5264 switch (*format_ptr++)
5266 case 'e':
5267 if (XEXP (orig, i) != NULL)
5268 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
5269 break;
5271 case 'E':
5272 case 'V':
5273 if (XVEC (orig, i) == orig_asm_constraints_vector)
5274 XVEC (copy, i) = copy_asm_constraints_vector;
5275 else if (XVEC (orig, i) == orig_asm_operands_vector)
5276 XVEC (copy, i) = copy_asm_operands_vector;
5277 else if (XVEC (orig, i) != NULL)
5279 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
5280 for (j = 0; j < XVECLEN (copy, i); j++)
5281 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
5283 break;
5285 case 't':
5286 case 'w':
5287 case 'i':
5288 case 's':
5289 case 'S':
5290 case 'u':
5291 case '0':
5292 /* These are left unchanged. */
5293 break;
5295 default:
5296 abort ();
5300 if (code == SCRATCH)
5302 i = copy_insn_n_scratches++;
5303 if (i >= MAX_RECOG_OPERANDS)
5304 abort ();
5305 copy_insn_scratch_in[i] = orig;
5306 copy_insn_scratch_out[i] = copy;
5308 else if (code == ASM_OPERANDS)
5310 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
5311 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
5312 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
5313 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
5316 return copy;
5319 /* Create a new copy of an rtx.
5320 This function differs from copy_rtx in that it handles SCRATCHes and
5321 ASM_OPERANDs properly.
5322 INSN doesn't really have to be a full INSN; it could be just the
5323 pattern. */
5325 copy_insn (insn)
5326 rtx insn;
5328 copy_insn_n_scratches = 0;
5329 orig_asm_operands_vector = 0;
5330 orig_asm_constraints_vector = 0;
5331 copy_asm_operands_vector = 0;
5332 copy_asm_constraints_vector = 0;
5333 return copy_insn_1 (insn);
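/* A minimal usage sketch (illustrative; OLD and PLACE are assumed
   insns): duplicating a pattern for re-emission elsewhere:

       rtx pat = copy_insn (PATTERN (old));
       emit_insn_before (pat, place);

   Unlike plain copy_rtx, corresponding SCRATCHes in the copy remain
   shared with each other, and an ASM_OPERANDS input vector is
   copied only once however often it appears.  */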
5336 /* Initialize data structures and variables in this file
5337 before generating rtl for each function. */
5339 void
5340 init_emit ()
5342 struct function *f = cfun;
5344 f->emit = (struct emit_status *) ggc_alloc (sizeof (struct emit_status));
5345 first_insn = NULL;
5346 last_insn = NULL;
5347 seq_rtl_expr = NULL;
5348 cur_insn_uid = 1;
5349 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
5350 last_linenum = 0;
5351 last_filename = 0;
5352 first_label_num = label_num;
5353 last_label_num = 0;
5354 seq_stack = NULL;
5356 /* Init the tables that describe all the pseudo regs. */
5358 f->emit->regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
5360 f->emit->regno_pointer_align
5361 = (unsigned char *) ggc_alloc_cleared (f->emit->regno_pointer_align_length
5362 * sizeof (unsigned char));
5364 regno_reg_rtx
5365 = (rtx *) ggc_alloc (f->emit->regno_pointer_align_length * sizeof (rtx));
5367 /* Put copies of all the hard registers into regno_reg_rtx. */
5368 memcpy (regno_reg_rtx,
5369 static_regno_reg_rtx,
5370 FIRST_PSEUDO_REGISTER * sizeof (rtx));
5372 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
5373 init_virtual_regs (f->emit);
5375 /* Indicate that the virtual registers and stack locations are
5376 all pointers. */
5377 REG_POINTER (stack_pointer_rtx) = 1;
5378 REG_POINTER (frame_pointer_rtx) = 1;
5379 REG_POINTER (hard_frame_pointer_rtx) = 1;
5380 REG_POINTER (arg_pointer_rtx) = 1;
5382 REG_POINTER (virtual_incoming_args_rtx) = 1;
5383 REG_POINTER (virtual_stack_vars_rtx) = 1;
5384 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5385 REG_POINTER (virtual_outgoing_args_rtx) = 1;
5386 REG_POINTER (virtual_cfa_rtx) = 1;
5388 #ifdef STACK_BOUNDARY
5389 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5390 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5391 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5392 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5394 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5395 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5396 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5397 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5398 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
5399 #endif
5401 #ifdef INIT_EXPANDERS
5402 INIT_EXPANDERS;
5403 #endif
5406 /* Generate the constant 0. */
5408 static rtx
5409 gen_const_vector_0 (mode)
5410 enum machine_mode mode;
5412 rtx tem;
5413 rtvec v;
5414 int units, i;
5415 enum machine_mode inner;
5417 units = GET_MODE_NUNITS (mode);
5418 inner = GET_MODE_INNER (mode);
5420 v = rtvec_alloc (units);
5422 /* CONST0_RTX (inner) must already be set before this function is called. */
5423 if (!CONST0_RTX (inner))
5424 abort ();
5426 for (i = 0; i < units; ++i)
5427 RTVEC_ELT (v, i) = CONST0_RTX (inner);
5429 tem = gen_rtx_raw_CONST_VECTOR (mode, v);
5430 return tem;
5433 /* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the zero vector when
5434 all elements are zero. */
5436 gen_rtx_CONST_VECTOR (mode, v)
5437 enum machine_mode mode;
5438 rtvec v;
5440 rtx inner_zero = CONST0_RTX (GET_MODE_INNER (mode));
5441 int i;
5443 for (i = GET_MODE_NUNITS (mode) - 1; i >= 0; i--)
5444 if (RTVEC_ELT (v, i) != inner_zero)
5445 return gen_rtx_raw_CONST_VECTOR (mode, v);
5446 return CONST0_RTX (mode);
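/* A minimal usage sketch (illustrative, assuming the target supports
   V4SImode):

       rtvec v = rtvec_alloc (4);
       rtx x;
       int i;
       for (i = 0; i < 4; i++)
         RTVEC_ELT (v, i) = const0_rtx;
       x = gen_rtx_CONST_VECTOR (V4SImode, v);

   Since every element is zero, X ends up being the shared
   CONST0_RTX (V4SImode) rather than a freshly allocated vector.  */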
5449 /* Create some permanent unique rtl objects shared between all functions.
5450 LINE_NUMBERS is nonzero if line numbers are to be generated. */
5452 void
5453 init_emit_once (line_numbers)
5454 int line_numbers;
5456 int i;
5457 enum machine_mode mode;
5458 enum machine_mode double_mode;
5460 /* Initialize the CONST_INT, CONST_DOUBLE, and memory attribute hash
5461 tables. */
5462 const_int_htab = htab_create_ggc (37, const_int_htab_hash,
5463 const_int_htab_eq, NULL);
5465 const_double_htab = htab_create_ggc (37, const_double_htab_hash,
5466 const_double_htab_eq, NULL);
5468 mem_attrs_htab = htab_create_ggc (37, mem_attrs_htab_hash,
5469 mem_attrs_htab_eq, NULL);
5470 reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash,
5471 reg_attrs_htab_eq, NULL);
5473 no_line_numbers = ! line_numbers;
5475 /* Compute the word and byte modes. */
5477 byte_mode = VOIDmode;
5478 word_mode = VOIDmode;
5479 double_mode = VOIDmode;
5481 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
5482 mode = GET_MODE_WIDER_MODE (mode))
5484 if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
5485 && byte_mode == VOIDmode)
5486 byte_mode = mode;
5488 if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
5489 && word_mode == VOIDmode)
5490 word_mode = mode;
5493 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
5494 mode = GET_MODE_WIDER_MODE (mode))
5496 if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
5497 && double_mode == VOIDmode)
5498 double_mode = mode;
5501 ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
5503 /* Assign register numbers to the globally defined register rtx.
5504 This must be done at runtime because the register number field
5505 is in a union and some compilers can't initialize unions. */
5507 pc_rtx = gen_rtx (PC, VOIDmode);
5508 cc0_rtx = gen_rtx (CC0, VOIDmode);
5509 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
5510 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
5511 if (hard_frame_pointer_rtx == 0)
5512 hard_frame_pointer_rtx = gen_raw_REG (Pmode,
5513 HARD_FRAME_POINTER_REGNUM);
5514 if (arg_pointer_rtx == 0)
5515 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
5516 virtual_incoming_args_rtx =
5517 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
5518 virtual_stack_vars_rtx =
5519 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
5520 virtual_stack_dynamic_rtx =
5521 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
5522 virtual_outgoing_args_rtx =
5523 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
5524 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
5526 /* Initialize RTL for commonly used hard registers. These are
5527 copied into regno_reg_rtx as we begin to compile each function. */
5528 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5529 static_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
5531 #ifdef INIT_EXPANDERS
5532 /* This is to initialize {init|mark|free}_machine_status before the first
5533 call to push_function_context_to. This is needed by the Chill front
5534 end which calls push_function_context_to before the first call to
5535 init_function_start. */
5536 INIT_EXPANDERS;
5537 #endif
5539 /* Create the unique rtx's for certain rtx codes and operand values. */
5541 /* Don't use gen_rtx here since gen_rtx in this case
5542 tries to use these variables. */
5543 for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
5544 const_int_rtx[i + MAX_SAVED_CONST_INT] =
5545 gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);
5547 if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
5548 && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
5549 const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
5550 else
5551 const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
5553 REAL_VALUE_FROM_INT (dconst0, 0, 0, double_mode);
5554 REAL_VALUE_FROM_INT (dconst1, 1, 0, double_mode);
5555 REAL_VALUE_FROM_INT (dconst2, 2, 0, double_mode);
5556 REAL_VALUE_FROM_INT (dconstm1, -1, -1, double_mode);
5557 REAL_VALUE_FROM_INT (dconstm2, -2, -1, double_mode);
5559 dconsthalf = dconst1;
5560 dconsthalf.exp--;
5562 for (i = 0; i <= 2; i++)
5564 REAL_VALUE_TYPE *r =
5565 (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
5567 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
5568 mode = GET_MODE_WIDER_MODE (mode))
5569 const_tiny_rtx[i][(int) mode] =
5570 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5572 const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
5574 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
5575 mode = GET_MODE_WIDER_MODE (mode))
5576 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5578 for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT);
5579 mode != VOIDmode;
5580 mode = GET_MODE_WIDER_MODE (mode))
5581 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5584 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
5585 mode != VOIDmode;
5586 mode = GET_MODE_WIDER_MODE (mode))
5587 const_tiny_rtx[0][(int) mode] = gen_const_vector_0 (mode);
5589 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
5590 mode != VOIDmode;
5591 mode = GET_MODE_WIDER_MODE (mode))
5592 const_tiny_rtx[0][(int) mode] = gen_const_vector_0 (mode);
5594 for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
5595 if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
5596 const_tiny_rtx[0][i] = const0_rtx;
5598 const_tiny_rtx[0][(int) BImode] = const0_rtx;
5599 if (STORE_FLAG_VALUE == 1)
5600 const_tiny_rtx[1][(int) BImode] = const1_rtx;
5602 #ifdef RETURN_ADDRESS_POINTER_REGNUM
5603 return_address_pointer_rtx
5604 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
5605 #endif
5607 #ifdef STRUCT_VALUE
5608 struct_value_rtx = STRUCT_VALUE;
5609 #else
5610 struct_value_rtx = gen_rtx_REG (Pmode, STRUCT_VALUE_REGNUM);
5611 #endif
5613 #ifdef STRUCT_VALUE_INCOMING
5614 struct_value_incoming_rtx = STRUCT_VALUE_INCOMING;
5615 #else
5616 #ifdef STRUCT_VALUE_INCOMING_REGNUM
5617 struct_value_incoming_rtx
5618 = gen_rtx_REG (Pmode, STRUCT_VALUE_INCOMING_REGNUM);
5619 #else
5620 struct_value_incoming_rtx = struct_value_rtx;
5621 #endif
5622 #endif
5624 #ifdef STATIC_CHAIN_REGNUM
5625 static_chain_rtx = gen_rtx_REG (Pmode, STATIC_CHAIN_REGNUM);
5627 #ifdef STATIC_CHAIN_INCOMING_REGNUM
5628 if (STATIC_CHAIN_INCOMING_REGNUM != STATIC_CHAIN_REGNUM)
5629 static_chain_incoming_rtx
5630 = gen_rtx_REG (Pmode, STATIC_CHAIN_INCOMING_REGNUM);
5631 else
5632 #endif
5633 static_chain_incoming_rtx = static_chain_rtx;
5634 #endif
5636 #ifdef STATIC_CHAIN
5637 static_chain_rtx = STATIC_CHAIN;
5639 #ifdef STATIC_CHAIN_INCOMING
5640 static_chain_incoming_rtx = STATIC_CHAIN_INCOMING;
5641 #else
5642 static_chain_incoming_rtx = static_chain_rtx;
5643 #endif
5644 #endif
5646 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
5647 pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
5650 /* Query and clear/restore no_line_numbers.  This is used by the
5651 switch/case handling in stmt.c to give proper line numbers in
5652 warnings about unreachable code. */
5655 force_line_numbers ()
5657 int old = no_line_numbers;
5659 no_line_numbers = 0;
5660 if (old)
5661 force_next_line_note ();
5662 return old;
5665 void
5666 restore_line_number_status (old_value)
5667 int old_value;
5669 no_line_numbers = old_value;
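/* A minimal usage sketch (illustrative), matching the stmt.c use
   described above:

       int old = force_line_numbers ();
       ... expand the code that must carry line notes ...
       restore_line_number_status (old);
*/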
5672 /* Produce an exact duplicate of insn INSN after AFTER.
5673 Take care to update any libcall regions if present. */
5676 emit_copy_of_insn_after (insn, after)
5677 rtx insn, after;
5679 rtx new;
5680 rtx note1, note2, link;
5682 switch (GET_CODE (insn))
5684 case INSN:
5685 new = emit_insn_after (copy_insn (PATTERN (insn)), after);
5686 break;
5688 case JUMP_INSN:
5689 new = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
5690 break;
5692 case CALL_INSN:
5693 new = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
5694 if (CALL_INSN_FUNCTION_USAGE (insn))
5695 CALL_INSN_FUNCTION_USAGE (new)
5696 = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
5697 SIBLING_CALL_P (new) = SIBLING_CALL_P (insn);
5698 CONST_OR_PURE_CALL_P (new) = CONST_OR_PURE_CALL_P (insn);
5699 break;
5701 default:
5702 abort ();
5705 /* Update LABEL_NUSES. */
5706 mark_jump_label (PATTERN (new), new, 0);
5708 INSN_SCOPE (new) = INSN_SCOPE (insn);
5710 /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
5711 make them. */
5712 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
5713 if (REG_NOTE_KIND (link) != REG_LABEL)
5715 if (GET_CODE (link) == EXPR_LIST)
5716 REG_NOTES (new)
5717 = copy_insn_1 (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link),
5718 XEXP (link, 0),
5719 REG_NOTES (new)));
5720 else
5721 REG_NOTES (new)
5722 = copy_insn_1 (gen_rtx_INSN_LIST (REG_NOTE_KIND (link),
5723 XEXP (link, 0),
5724 REG_NOTES (new)));
5727 /* Fix the libcall sequences. */
5728 if ((note1 = find_reg_note (new, REG_RETVAL, NULL_RTX)) != NULL)
5730 rtx p = new;
5731 while ((note2 = find_reg_note (p, REG_LIBCALL, NULL_RTX)) == NULL)
5732 p = PREV_INSN (p);
5733 XEXP (note1, 0) = p;
5734 XEXP (note2, 0) = new;
5736 INSN_CODE (new) = INSN_CODE (insn);
5737 return new;
5740 #include "gt-emit-rtl.h"