Fix gnu11 fallout on SPARC
[official-gcc.git] / gcc / emit-rtl.c
blob aa95f68c166f2dbbb8ca07960d563c38037cef84
1 /* Emit RTL for the GCC expander.
2 Copyright (C) 1987-2014 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
21 /* Middle-to-low level generation of rtx code and insns.
23 This file contains support functions for creating rtl expressions
24 and manipulating them in the doubly-linked chain of insns.
26 The patterns of the insns are created by machine-dependent
27 routines in insn-emit.c, which is generated automatically from
28 the machine description. These routines make the individual rtx's
29 of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
30 which are automatically generated from rtl.def; what is machine
31 dependent is the kind of rtx's they make and what arguments they
32 use. */
34 #include "config.h"
35 #include "system.h"
36 #include "coretypes.h"
37 #include "tm.h"
38 #include "diagnostic-core.h"
39 #include "rtl.h"
40 #include "tree.h"
41 #include "varasm.h"
42 #include "basic-block.h"
43 #include "tree-eh.h"
44 #include "tm_p.h"
45 #include "flags.h"
46 #include "hashtab.h"
47 #include "hash-set.h"
48 #include "vec.h"
49 #include "machmode.h"
50 #include "hard-reg-set.h"
51 #include "input.h"
52 #include "function.h"
53 #include "stringpool.h"
54 #include "expr.h"
55 #include "regs.h"
56 #include "insn-config.h"
57 #include "recog.h"
58 #include "bitmap.h"
59 #include "debug.h"
60 #include "langhooks.h"
61 #include "df.h"
62 #include "params.h"
63 #include "target.h"
64 #include "builtins.h"
65 #include "rtl-iter.h"
67 struct target_rtl default_target_rtl;
68 #if SWITCHABLE_TARGET
69 struct target_rtl *this_target_rtl = &default_target_rtl;
70 #endif
72 #define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)
74 /* Commonly used modes. */
76 enum machine_mode byte_mode; /* Mode whose width is BITS_PER_UNIT. */
77 enum machine_mode word_mode; /* Mode whose width is BITS_PER_WORD. */
78 enum machine_mode double_mode; /* Mode whose width is DOUBLE_TYPE_SIZE. */
79 enum machine_mode ptr_mode; /* Mode whose width is POINTER_SIZE. */
81 /* Data structures maintained for the currently processed function in RTL form. */
83 struct rtl_data x_rtl;
85 /* Indexed by pseudo register number, gives the rtx for that pseudo.
86 Allocated in parallel with regno_pointer_align.
87 FIXME: We could put it into the emit_status struct, but gengtype is not able to deal
88 with a length attribute nested in top-level structures. */
90 rtx * regno_reg_rtx;
92 /* This is *not* reset after each function. It gives each CODE_LABEL
93 in the entire compilation a unique label number. */
95 static GTY(()) int label_num = 1;
97 /* We record floating-point CONST_DOUBLEs in each floating-point mode for
98 the values of 0, 1, and 2. For the integer entries and VOIDmode, we
99 record a copy of const[012]_rtx and constm1_rtx. CONSTM1_RTX
100 is set only for MODE_INT and MODE_VECTOR_INT modes. */
102 rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE];
104 rtx const_true_rtx;
106 REAL_VALUE_TYPE dconst0;
107 REAL_VALUE_TYPE dconst1;
108 REAL_VALUE_TYPE dconst2;
109 REAL_VALUE_TYPE dconstm1;
110 REAL_VALUE_TYPE dconsthalf;
112 /* Record fixed-point constant 0 and 1. */
113 FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
114 FIXED_VALUE_TYPE fconst1[MAX_FCONST1];
116 /* We make one copy of (const_int C) where C is in
117 [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
118 to save space during the compilation and simplify comparisons of
119 integers. */
121 rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
123 /* Standard pieces of rtx, to be substituted directly into things. */
124 rtx pc_rtx;
125 rtx ret_rtx;
126 rtx simple_return_rtx;
127 rtx cc0_rtx;
129 /* A hash table storing CONST_INTs whose absolute value is greater
130 than MAX_SAVED_CONST_INT. */
132 static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
133 htab_t const_int_htab;
135 static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
136 htab_t const_wide_int_htab;
138 /* A hash table storing register attribute structures. */
139 static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs)))
140 htab_t reg_attrs_htab;
142 /* A hash table storing all CONST_DOUBLEs. */
143 static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
144 htab_t const_double_htab;
146 /* A hash table storing all CONST_FIXEDs. */
147 static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
148 htab_t const_fixed_htab;
150 #define cur_insn_uid (crtl->emit.x_cur_insn_uid)
151 #define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
152 #define first_label_num (crtl->emit.x_first_label_num)
154 static void set_used_decls (tree);
155 static void mark_label_nuses (rtx);
156 static hashval_t const_int_htab_hash (const void *);
157 static int const_int_htab_eq (const void *, const void *);
158 #if TARGET_SUPPORTS_WIDE_INT
159 static hashval_t const_wide_int_htab_hash (const void *);
160 static int const_wide_int_htab_eq (const void *, const void *);
161 static rtx lookup_const_wide_int (rtx);
162 #endif
163 static hashval_t const_double_htab_hash (const void *);
164 static int const_double_htab_eq (const void *, const void *);
165 static rtx lookup_const_double (rtx);
166 static hashval_t const_fixed_htab_hash (const void *);
167 static int const_fixed_htab_eq (const void *, const void *);
168 static rtx lookup_const_fixed (rtx);
169 static hashval_t reg_attrs_htab_hash (const void *);
170 static int reg_attrs_htab_eq (const void *, const void *);
171 static reg_attrs *get_reg_attrs (tree, int);
172 static rtx gen_const_vector (enum machine_mode, int);
173 static void copy_rtx_if_shared_1 (rtx *orig);
175 /* Probability of the conditional branch currently being processed by try_split.
176 Set to -1 otherwise. */
177 int split_branch_probability = -1;
179 /* Returns a hash code for X (which is really a CONST_INT). */
181 static hashval_t
182 const_int_htab_hash (const void *x)
184 return (hashval_t) INTVAL ((const_rtx) x);
187 /* Returns nonzero if the value represented by X (which is really a
188 CONST_INT) is the same as that given by Y (which is really a
189 HOST_WIDE_INT *). */
191 static int
192 const_int_htab_eq (const void *x, const void *y)
194 return (INTVAL ((const_rtx) x) == *((const HOST_WIDE_INT *) y));
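/* A minimal sketch of how hash/equality callbacks such as the pair above
   are typically wired up with the garbage-collected hash-table machinery
   (the real registration of const_int_htab happens during emit
   initialization elsewhere in this file; the initial size 37 below is an
   arbitrary illustrative choice):

     const_int_htab = htab_create_ggc (37, const_int_htab_hash,
                                       const_int_htab_eq, NULL);

   Once registered, gen_rtx_CONST_INT below can use
   htab_find_slot_with_hash to hand out one shared rtx per distinct
   HOST_WIDE_INT value.  */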
197 #if TARGET_SUPPORTS_WIDE_INT
198 /* Returns a hash code for X (which is really a CONST_WIDE_INT). */
200 static hashval_t
201 const_wide_int_htab_hash (const void *x)
203 int i;
204 HOST_WIDE_INT hash = 0;
205 const_rtx xr = (const_rtx) x;
207 for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
208 hash += CONST_WIDE_INT_ELT (xr, i);
210 return (hashval_t) hash;
213 /* Returns nonzero if the value represented by X (which is really a
214 CONST_WIDE_INT) is the same as that given by Y (which is really a
215 CONST_WIDE_INT). */
217 static int
218 const_wide_int_htab_eq (const void *x, const void *y)
220 int i;
221 const_rtx xr = (const_rtx) x;
222 const_rtx yr = (const_rtx) y;
223 if (CONST_WIDE_INT_NUNITS (xr) != CONST_WIDE_INT_NUNITS (yr))
224 return 0;
226 for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
227 if (CONST_WIDE_INT_ELT (xr, i) != CONST_WIDE_INT_ELT (yr, i))
228 return 0;
230 return 1;
232 #endif
234 /* Returns a hash code for X (which is really a CONST_DOUBLE). */
235 static hashval_t
236 const_double_htab_hash (const void *x)
238 const_rtx const value = (const_rtx) x;
239 hashval_t h;
241 if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (value) == VOIDmode)
242 h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
243 else
245 h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
246 /* MODE is used in the comparison, so it should be in the hash. */
247 h ^= GET_MODE (value);
249 return h;
252 /* Returns nonzero if the value represented by X (really a ...)
253 is the same as that represented by Y (really a ...) */
254 static int
255 const_double_htab_eq (const void *x, const void *y)
257 const_rtx const a = (const_rtx)x, b = (const_rtx)y;
259 if (GET_MODE (a) != GET_MODE (b))
260 return 0;
261 if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (a) == VOIDmode)
262 return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
263 && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
264 else
265 return real_identical (CONST_DOUBLE_REAL_VALUE (a),
266 CONST_DOUBLE_REAL_VALUE (b));
269 /* Returns a hash code for X (which is really a CONST_FIXED). */
271 static hashval_t
272 const_fixed_htab_hash (const void *x)
274 const_rtx const value = (const_rtx) x;
275 hashval_t h;
277 h = fixed_hash (CONST_FIXED_VALUE (value));
278 /* MODE is used in the comparison, so it should be in the hash. */
279 h ^= GET_MODE (value);
280 return h;
283 /* Returns nonzero if the value represented by X (really a ...)
284 is the same as that represented by Y (really a ...). */
286 static int
287 const_fixed_htab_eq (const void *x, const void *y)
289 const_rtx const a = (const_rtx) x, b = (const_rtx) y;
291 if (GET_MODE (a) != GET_MODE (b))
292 return 0;
293 return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
296 /* Return true if the given memory attributes are equal. */
298 bool
299 mem_attrs_eq_p (const struct mem_attrs *p, const struct mem_attrs *q)
301 if (p == q)
302 return true;
303 if (!p || !q)
304 return false;
305 return (p->alias == q->alias
306 && p->offset_known_p == q->offset_known_p
307 && (!p->offset_known_p || p->offset == q->offset)
308 && p->size_known_p == q->size_known_p
309 && (!p->size_known_p || p->size == q->size)
310 && p->align == q->align
311 && p->addrspace == q->addrspace
312 && (p->expr == q->expr
313 || (p->expr != NULL_TREE && q->expr != NULL_TREE
314 && operand_equal_p (p->expr, q->expr, 0))));
317 /* Set MEM's memory attributes so that they are the same as ATTRS. */
319 static void
320 set_mem_attrs (rtx mem, mem_attrs *attrs)
322 /* If everything is the default, we can just clear the attributes. */
323 if (mem_attrs_eq_p (attrs, mode_mem_attrs[(int) GET_MODE (mem)]))
325 MEM_ATTRS (mem) = 0;
326 return;
329 if (!MEM_ATTRS (mem)
330 || !mem_attrs_eq_p (attrs, MEM_ATTRS (mem)))
332 MEM_ATTRS (mem) = ggc_alloc<mem_attrs> ();
333 memcpy (MEM_ATTRS (mem), attrs, sizeof (mem_attrs));
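/* The usual way to change one attribute of a MEM is to copy its current
   attribute block, adjust a field, and hand the copy back to
   set_mem_attrs, which either shares the mode's default block or
   allocates a fresh GC'd copy.  A minimal sketch, assuming MEM is some
   existing MEM rtx whose alignment is now known to be 64 bits:

     struct mem_attrs attrs = *get_mem_attrs (mem);
     attrs.align = 64;
     set_mem_attrs (mem, &attrs);

   The set_mem_align, set_mem_size and related helpers later in this file
   all follow this copy-modify-set pattern.  */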
337 /* Returns a hash code for X (which is really a reg_attrs *). */
339 static hashval_t
340 reg_attrs_htab_hash (const void *x)
342 const reg_attrs *const p = (const reg_attrs *) x;
344 return ((p->offset * 1000) ^ (intptr_t) p->decl);
347 /* Returns nonzero if the value represented by X (which is really a
348 reg_attrs *) is the same as that given by Y (which is also really a
349 reg_attrs *). */
351 static int
352 reg_attrs_htab_eq (const void *x, const void *y)
354 const reg_attrs *const p = (const reg_attrs *) x;
355 const reg_attrs *const q = (const reg_attrs *) y;
357 return (p->decl == q->decl && p->offset == q->offset);
359 /* Allocate a new reg_attrs structure and insert it into the hash table if
360 one identical to it is not already in the table. The attributes describe
361 a register holding DECL at byte offset OFFSET. */
363 static reg_attrs *
364 get_reg_attrs (tree decl, int offset)
366 reg_attrs attrs;
367 void **slot;
369 /* If everything is the default, we can just return zero. */
370 if (decl == 0 && offset == 0)
371 return 0;
373 attrs.decl = decl;
374 attrs.offset = offset;
376 slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
377 if (*slot == 0)
379 *slot = ggc_alloc<reg_attrs> ();
380 memcpy (*slot, &attrs, sizeof (reg_attrs));
383 return (reg_attrs *) *slot;
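/* Because the hash table unifies identical entries, reg_attrs blocks can
   be compared by pointer, and the all-default case costs nothing.  A
   small sketch, where DECL stands for any decl tree (hypothetical here):

     reg_attrs *a = get_reg_attrs (decl, 4);
     reg_attrs *b = get_reg_attrs (decl, 4);
     gcc_assert (a == b);
     gcc_assert (get_reg_attrs (NULL_TREE, 0) == NULL);

   The NULL result for the default case is what lets REG_ATTRS stay zero
   for most registers.  */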
387 #if !HAVE_blockage
388 /* Generate an empty ASM_INPUT, which is used to block attempts to schedule,
389 and to block register equivalences from being seen across this insn. */
392 gen_blockage (void)
394 rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
395 MEM_VOLATILE_P (x) = true;
396 return x;
398 #endif
401 /* Generate a new REG rtx. Make sure ORIGINAL_REGNO is set properly, and
402 don't attempt to share with the various global pieces of rtl (such as
403 frame_pointer_rtx). */
406 gen_raw_REG (enum machine_mode mode, int regno)
408 rtx x = gen_rtx_raw_REG (mode, regno);
409 ORIGINAL_REGNO (x) = regno;
410 return x;
413 /* There are some RTL codes that require special attention; the generation
414 functions do the raw handling. If you add to this list, modify
415 special_rtx in gengenrtl.c as well. */
417 rtx_expr_list *
418 gen_rtx_EXPR_LIST (enum machine_mode mode, rtx expr, rtx expr_list)
420 return as_a <rtx_expr_list *> (gen_rtx_fmt_ee (EXPR_LIST, mode, expr,
421 expr_list));
424 rtx_insn_list *
425 gen_rtx_INSN_LIST (enum machine_mode mode, rtx insn, rtx insn_list)
427 return as_a <rtx_insn_list *> (gen_rtx_fmt_ue (INSN_LIST, mode, insn,
428 insn_list));
431 rtx_insn *
432 gen_rtx_INSN (enum machine_mode mode, rtx_insn *prev_insn, rtx_insn *next_insn,
433 basic_block bb, rtx pattern, int location, int code,
434 rtx reg_notes)
436 return as_a <rtx_insn *> (gen_rtx_fmt_uuBeiie (INSN, mode,
437 prev_insn, next_insn,
438 bb, pattern, location, code,
439 reg_notes));
443 gen_rtx_CONST_INT (enum machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
445 void **slot;
447 if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
448 return const_int_rtx[arg + MAX_SAVED_CONST_INT];
450 #if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
451 if (const_true_rtx && arg == STORE_FLAG_VALUE)
452 return const_true_rtx;
453 #endif
455 /* Look up the CONST_INT in the hash table. */
456 slot = htab_find_slot_with_hash (const_int_htab, &arg,
457 (hashval_t) arg, INSERT);
458 if (*slot == 0)
459 *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);
461 return (rtx) *slot;
465 gen_int_mode (HOST_WIDE_INT c, enum machine_mode mode)
467 return GEN_INT (trunc_int_for_mode (c, mode));
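/* A small sketch of the sharing this buys.  Both calls below return the
   same rtx object, because -1 lies inside the cached range and
   truncating -1 to QImode leaves it unchanged:

     rtx a = gen_int_mode (-1, QImode);
     rtx b = GEN_INT (-1);
     gcc_assert (a == b && a == constm1_rtx);

   Prefer gen_int_mode over bare GEN_INT whenever the target mode is
   known, so that the value is first canonicalized for that mode.  */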
470 /* CONST_DOUBLEs might be created from pairs of integers, or from
471 REAL_VALUE_TYPEs. Also, their length is known only at run time,
472 so we cannot use gen_rtx_raw_CONST_DOUBLE. */
474 /* Determine whether REAL, a CONST_DOUBLE, already exists in the
475 hash table. If so, return its counterpart; otherwise add it
476 to the hash table and return it. */
477 static rtx
478 lookup_const_double (rtx real)
480 void **slot = htab_find_slot (const_double_htab, real, INSERT);
481 if (*slot == 0)
482 *slot = real;
484 return (rtx) *slot;
487 /* Return a CONST_DOUBLE rtx for a floating-point value specified by
488 VALUE in mode MODE. */
490 const_double_from_real_value (REAL_VALUE_TYPE value, enum machine_mode mode)
492 rtx real = rtx_alloc (CONST_DOUBLE);
493 PUT_MODE (real, mode);
495 real->u.rv = value;
497 return lookup_const_double (real);
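/* A minimal sketch using one of the cached REAL_VALUE_TYPE constants
   declared near the top of this file; repeated requests for the same
   value and mode return the same hash-consed rtx:

     rtx x = const_double_from_real_value (dconsthalf, DFmode);
     rtx y = const_double_from_real_value (dconsthalf, DFmode);
     gcc_assert (x == y && CONST_DOUBLE_AS_FLOAT_P (x));
*/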
500 /* Determine whether FIXED, a CONST_FIXED, already exists in the
501 hash table. If so, return its counterpart; otherwise add it
502 to the hash table and return it. */
504 static rtx
505 lookup_const_fixed (rtx fixed)
507 void **slot = htab_find_slot (const_fixed_htab, fixed, INSERT);
508 if (*slot == 0)
509 *slot = fixed;
511 return (rtx) *slot;
514 /* Return a CONST_FIXED rtx for a fixed-point value specified by
515 VALUE in mode MODE. */
518 const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, enum machine_mode mode)
520 rtx fixed = rtx_alloc (CONST_FIXED);
521 PUT_MODE (fixed, mode);
523 fixed->u.fv = value;
525 return lookup_const_fixed (fixed);
528 #if TARGET_SUPPORTS_WIDE_INT == 0
529 /* Constructs double_int from rtx CST. */
531 double_int
532 rtx_to_double_int (const_rtx cst)
534 double_int r;
536 if (CONST_INT_P (cst))
537 r = double_int::from_shwi (INTVAL (cst));
538 else if (CONST_DOUBLE_AS_INT_P (cst))
540 r.low = CONST_DOUBLE_LOW (cst);
541 r.high = CONST_DOUBLE_HIGH (cst);
543 else
544 gcc_unreachable ();
546 return r;
548 #endif
550 #if TARGET_SUPPORTS_WIDE_INT
551 /* Determine whether CONST_WIDE_INT WINT already exists in the hash table.
552 If so, return its counterpart; otherwise add it to the hash table and
553 return it. */
555 static rtx
556 lookup_const_wide_int (rtx wint)
558 void **slot = htab_find_slot (const_wide_int_htab, wint, INSERT);
559 if (*slot == 0)
560 *slot = wint;
562 return (rtx) *slot;
564 #endif
566 /* Return an rtx constant for V, given that the constant has mode MODE.
567 The returned rtx will be a CONST_INT if V fits, otherwise it will be
568 a CONST_DOUBLE (if !TARGET_SUPPORTS_WIDE_INT) or a CONST_WIDE_INT
569 (if TARGET_SUPPORTS_WIDE_INT). */
572 immed_wide_int_const (const wide_int_ref &v, enum machine_mode mode)
574 unsigned int len = v.get_len ();
575 unsigned int prec = GET_MODE_PRECISION (mode);
577 /* Allow truncation but not extension since we do not know if the
578 number is signed or unsigned. */
579 gcc_assert (prec <= v.get_precision ());
581 if (len < 2 || prec <= HOST_BITS_PER_WIDE_INT)
582 return gen_int_mode (v.elt (0), mode);
584 #if TARGET_SUPPORTS_WIDE_INT
586 unsigned int i;
587 rtx value;
588 unsigned int blocks_needed
589 = (prec + HOST_BITS_PER_WIDE_INT - 1) / HOST_BITS_PER_WIDE_INT;
591 if (len > blocks_needed)
592 len = blocks_needed;
594 value = const_wide_int_alloc (len);
596 /* It is so tempting to just put the mode in here. Must control
597 myself ... */
598 PUT_MODE (value, VOIDmode);
599 CWI_PUT_NUM_ELEM (value, len);
601 for (i = 0; i < len; i++)
602 CONST_WIDE_INT_ELT (value, i) = v.elt (i);
604 return lookup_const_wide_int (value);
606 #else
607 return immed_double_const (v.elt (0), v.elt (1), mode);
608 #endif
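/* A small sketch.  A value whose significant bits fit in a single
   HOST_WIDE_INT block simply comes back as a shared CONST_INT; only
   wider values reach the CONST_WIDE_INT (or CONST_DOUBLE) path:

     rtx c = immed_wide_int_const (wi::shwi (42, GET_MODE_PRECISION (DImode)),
                                   DImode);
     gcc_assert (CONST_INT_P (c) && INTVAL (c) == 42);
*/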
611 #if TARGET_SUPPORTS_WIDE_INT == 0
612 /* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
613 of ints: I0 is the low-order word and I1 is the high-order word.
614 For values that are larger than HOST_BITS_PER_DOUBLE_INT, the
615 implied upper bits are copies of the high bit of i1. The value
616 itself is neither signed nor unsigned. Do not use this routine for
617 non-integer modes; convert to REAL_VALUE_TYPE and use
618 CONST_DOUBLE_FROM_REAL_VALUE. */
621 immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, enum machine_mode mode)
623 rtx value;
624 unsigned int i;
626 /* There are the following cases (note that there are no modes with
627 HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < HOST_BITS_PER_DOUBLE_INT):
629 1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
630 gen_int_mode.
631 2) If the value of the integer fits into HOST_WIDE_INT anyway
632 (i.e., i1 consists only of copies of the sign bit, and the signs
633 of i0 and i1 are the same), then we return a CONST_INT for i0.
634 3) Otherwise, we create a CONST_DOUBLE for i0 and i1. */
635 if (mode != VOIDmode)
637 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
638 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
639 /* We can get a 0 for an error mark. */
640 || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
641 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT);
643 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
644 return gen_int_mode (i0, mode);
647 /* If this integer fits in one word, return a CONST_INT. */
648 if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
649 return GEN_INT (i0);
651 /* We use VOIDmode for integers. */
652 value = rtx_alloc (CONST_DOUBLE);
653 PUT_MODE (value, VOIDmode);
655 CONST_DOUBLE_LOW (value) = i0;
656 CONST_DOUBLE_HIGH (value) = i1;
658 for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
659 XWINT (value, i) = 0;
661 return lookup_const_double (value);
663 #endif
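/* A small sketch of the cases described above, for targets built without
   TARGET_SUPPORTS_WIDE_INT.  When I1 is merely the sign extension of I0
   the value fits in a CONST_INT; otherwise a VOIDmode CONST_DOUBLE
   carries both words:

     rtx a = immed_double_const (-1, -1, VOIDmode);
     rtx b = immed_double_const (0, 1, VOIDmode);
     gcc_assert (a == constm1_rtx);
     gcc_assert (CONST_DOUBLE_AS_INT_P (b) && CONST_DOUBLE_HIGH (b) == 1);
*/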
666 gen_rtx_REG (enum machine_mode mode, unsigned int regno)
668 /* In case the MD file explicitly references the frame pointer, have
669 all such references point to the same frame pointer. This is
670 used during frame pointer elimination to distinguish the explicit
671 references to these registers from pseudos that happened to be
672 assigned to them.
674 If we have eliminated the frame pointer or arg pointer, we will
675 be using it as a normal register, for example as a spill
676 register. In such cases, we might be accessing it in a mode that
677 is not Pmode and therefore cannot use the pre-allocated rtx.
679 Also don't do this when we are making new REGs in reload, since
680 we don't want to get confused with the real pointers. */
682 if (mode == Pmode && !reload_in_progress && !lra_in_progress)
684 if (regno == FRAME_POINTER_REGNUM
685 && (!reload_completed || frame_pointer_needed))
686 return frame_pointer_rtx;
687 #if !HARD_FRAME_POINTER_IS_FRAME_POINTER
688 if (regno == HARD_FRAME_POINTER_REGNUM
689 && (!reload_completed || frame_pointer_needed))
690 return hard_frame_pointer_rtx;
691 #endif
692 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && !HARD_FRAME_POINTER_IS_ARG_POINTER
693 if (regno == ARG_POINTER_REGNUM)
694 return arg_pointer_rtx;
695 #endif
696 #ifdef RETURN_ADDRESS_POINTER_REGNUM
697 if (regno == RETURN_ADDRESS_POINTER_REGNUM)
698 return return_address_pointer_rtx;
699 #endif
700 if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
701 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
702 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
703 return pic_offset_table_rtx;
704 if (regno == STACK_POINTER_REGNUM)
705 return stack_pointer_rtx;
708 #if 0
709 /* If the per-function register table has been set up, try to re-use
710 an existing entry in that table to avoid useless generation of RTL.
712 This code is disabled for now until we can fix the various backends
713 which depend on having non-shared hard registers in some cases. Long
714 term we want to re-enable this code as it can significantly cut down
715 on the amount of useless RTL that gets generated.
717 We'll also need to fix some code that runs after reload that wants to
718 set ORIGINAL_REGNO. */
720 if (cfun
721 && cfun->emit
722 && regno_reg_rtx
723 && regno < FIRST_PSEUDO_REGISTER
724 && reg_raw_mode[regno] == mode)
725 return regno_reg_rtx[regno];
726 #endif
728 return gen_raw_REG (mode, regno);
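/* A quick sketch of the sharing above: as long as we are not inside
   reload/LRA and the frame pointer is still needed, Pmode references to
   it all resolve to the single global frame_pointer_rtx rather than to a
   freshly allocated REG:

     rtx fp = gen_rtx_REG (Pmode, FRAME_POINTER_REGNUM);
     gcc_assert (fp == frame_pointer_rtx);
*/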
732 gen_rtx_MEM (enum machine_mode mode, rtx addr)
734 rtx rt = gen_rtx_raw_MEM (mode, addr);
736 /* This field is not cleared by the mere allocation of the rtx, so
737 we clear it here. */
738 MEM_ATTRS (rt) = 0;
740 return rt;
743 /* Generate a memory referring to non-trapping constant memory. */
746 gen_const_mem (enum machine_mode mode, rtx addr)
748 rtx mem = gen_rtx_MEM (mode, addr);
749 MEM_READONLY_P (mem) = 1;
750 MEM_NOTRAP_P (mem) = 1;
751 return mem;
754 /* Generate a MEM referring to fixed portions of the frame, e.g., register
755 save areas. */
758 gen_frame_mem (enum machine_mode mode, rtx addr)
760 rtx mem = gen_rtx_MEM (mode, addr);
761 MEM_NOTRAP_P (mem) = 1;
762 set_mem_alias_set (mem, get_frame_alias_set ());
763 return mem;
766 /* Generate a MEM referring to a temporary use of the stack, not part
767 of the fixed stack frame. For example, something which is pushed
768 by a target splitter. */
770 gen_tmp_stack_mem (enum machine_mode mode, rtx addr)
772 rtx mem = gen_rtx_MEM (mode, addr);
773 MEM_NOTRAP_P (mem) = 1;
774 if (!cfun->calls_alloca)
775 set_mem_alias_set (mem, get_frame_alias_set ());
776 return mem;
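/* A minimal sketch of building a read-only, non-trapping reference to a
   constant-pool-style object (the symbol name here is purely
   illustrative):

     rtx addr = gen_rtx_SYMBOL_REF (Pmode, "*.LC0");
     rtx mem = gen_const_mem (SImode, addr);
     gcc_assert (MEM_READONLY_P (mem) && MEM_NOTRAP_P (mem));
*/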
779 /* We want to create (subreg:OMODE (obj:IMODE) OFFSET). Return true if
780 this construct would be valid, and false otherwise. */
782 bool
783 validate_subreg (enum machine_mode omode, enum machine_mode imode,
784 const_rtx reg, unsigned int offset)
786 unsigned int isize = GET_MODE_SIZE (imode);
787 unsigned int osize = GET_MODE_SIZE (omode);
789 /* All subregs must be aligned. */
790 if (offset % osize != 0)
791 return false;
793 /* The subreg offset cannot be outside the inner object. */
794 if (offset >= isize)
795 return false;
797 /* ??? This should not be here. Temporarily continue to allow word_mode
798 subregs of anything. The most common offender is (subreg:SI (reg:DF)).
799 Generally, backends are doing something sketchy but it'll take time to
800 fix them all. */
801 if (omode == word_mode)
803 /* ??? Similarly, e.g. with (subreg:DF (reg:TI)). Though store_bit_field
804 is the culprit here, and not the backends. */
805 else if (osize >= UNITS_PER_WORD && isize >= osize)
807 /* Allow component subregs of complex and vector. Though given the below
808 extraction rules, it's not always clear what that means. */
809 else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
810 && GET_MODE_INNER (imode) == omode)
812 /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
813 i.e. (subreg:V4SF (reg:SF) 0). This surely isn't the cleanest way to
814 represent this. It's questionable if this ought to be represented at
815 all -- why can't this all be hidden in post-reload splitters that make
816 arbitrary mode changes to the registers themselves? */
817 else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
819 /* Subregs involving floating point modes are not allowed to
820 change size. Therefore (subreg:DI (reg:DF) 0) is fine, but
821 (subreg:SI (reg:DF) 0) isn't. */
822 else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
824 if (! (isize == osize
825 /* LRA can use subreg to store a floating point value in
826 an integer mode. Although the floating point and the
827 integer modes need the same number of hard registers,
828 the size of floating point mode can be less than the
829 integer mode. LRA also uses subregs when a register
830 should be used in a different mode within one insn. */
831 || lra_in_progress))
832 return false;
835 /* Paradoxical subregs must have offset zero. */
836 if (osize > isize)
837 return offset == 0;
839 /* This is a normal subreg. Verify that the offset is representable. */
841 /* For hard registers, we already have most of these rules collected in
842 subreg_offset_representable_p. */
843 if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
845 unsigned int regno = REGNO (reg);
847 #ifdef CANNOT_CHANGE_MODE_CLASS
848 if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
849 && GET_MODE_INNER (imode) == omode)
851 else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
852 return false;
853 #endif
855 return subreg_offset_representable_p (regno, imode, offset, omode);
858 /* For pseudo registers, we want most of the same checks. Namely:
859 If the register is no larger than a word, the subreg must be the lowpart.
860 If the register is larger than a word, the subreg must be the lowpart
861 of a subword. A subreg does *not* perform arbitrary bit extraction.
862 Given that we've already checked mode/offset alignment, we only have
863 to check subword subregs here. */
864 if (osize < UNITS_PER_WORD
865 && ! (lra_in_progress && (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))))
867 enum machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
868 unsigned int low_off = subreg_lowpart_offset (omode, wmode);
869 if (offset % UNITS_PER_WORD != low_off)
870 return false;
872 return true;
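/* Two quick illustrations of the rules above, for the usual 32- and
   64-bit targets and outside of LRA.  A lowpart SImode view of a DImode
   object is always representable, provided subreg_lowpart_offset is used
   to get the endian-correct byte offset; an HImode view of a DFmode
   object is rejected because subregs involving floating-point modes may
   not change size:

     gcc_assert (validate_subreg (SImode, DImode, NULL_RTX,
                                  subreg_lowpart_offset (SImode, DImode)));
     gcc_assert (!validate_subreg (HImode, DFmode, NULL_RTX, 0));
*/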
876 gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset)
878 gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
879 return gen_rtx_raw_SUBREG (mode, reg, offset);
882 /* Generate a SUBREG representing the least-significant part of REG if MODE
883 is smaller than mode of REG, otherwise paradoxical SUBREG. */
886 gen_lowpart_SUBREG (enum machine_mode mode, rtx reg)
888 enum machine_mode inmode;
890 inmode = GET_MODE (reg);
891 if (inmode == VOIDmode)
892 inmode = mode;
893 return gen_rtx_SUBREG (mode, reg,
894 subreg_lowpart_offset (mode, inmode));
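/* A small sketch: take the low SImode half of a fresh DImode pseudo.
   The helper computes the endian-correct SUBREG_BYTE itself:

     rtx wide = gen_reg_rtx (DImode);
     rtx low = gen_lowpart_SUBREG (SImode, wide);
     gcc_assert (subreg_lowpart_p (low));
*/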
898 gen_rtx_VAR_LOCATION (enum machine_mode mode, tree decl, rtx loc,
899 enum var_init_status status)
901 rtx x = gen_rtx_fmt_te (VAR_LOCATION, mode, decl, loc);
902 PAT_VAR_LOCATION_STATUS (x) = status;
903 return x;
907 /* Create an rtvec and stores within it the RTXen passed in the arguments. */
909 rtvec
910 gen_rtvec (int n, ...)
912 int i;
913 rtvec rt_val;
914 va_list p;
916 va_start (p, n);
918 /* Don't allocate an empty rtvec... */
919 if (n == 0)
921 va_end (p);
922 return NULL_RTVEC;
925 rt_val = rtvec_alloc (n);
927 for (i = 0; i < n; i++)
928 rt_val->elem[i] = va_arg (p, rtx);
930 va_end (p);
931 return rt_val;
934 rtvec
935 gen_rtvec_v (int n, rtx *argp)
937 int i;
938 rtvec rt_val;
940 /* Don't allocate an empty rtvec... */
941 if (n == 0)
942 return NULL_RTVEC;
944 rt_val = rtvec_alloc (n);
946 for (i = 0; i < n; i++)
947 rt_val->elem[i] = *argp++;
949 return rt_val;
952 rtvec
953 gen_rtvec_v (int n, rtx_insn **argp)
955 int i;
956 rtvec rt_val;
958 /* Don't allocate an empty rtvec... */
959 if (n == 0)
960 return NULL_RTVEC;
962 rt_val = rtvec_alloc (n);
964 for (i = 0; i < n; i++)
965 rt_val->elem[i] = *argp++;
967 return rt_val;
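/* A typical use is wrapping a fixed number of side-by-side rtxs in a
   PARALLEL, for instance when a pattern both sets a register and
   clobbers the flags (SET1 and CLOBBER1 stand for previously built rtxs
   and are hypothetical here):

     rtx par = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set1, clobber1));

   gen_rtvec_v does the same job when the elements already live in an
   array.  */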
971 /* Return the number of bytes between the start of an OUTER_MODE
972 in-memory value and the start of an INNER_MODE in-memory value,
973 given that the former is a lowpart of the latter. It may be a
974 paradoxical lowpart, in which case the offset will be negative
975 on big-endian targets. */
978 byte_lowpart_offset (enum machine_mode outer_mode,
979 enum machine_mode inner_mode)
981 if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode))
982 return subreg_lowpart_offset (outer_mode, inner_mode);
983 else
984 return -subreg_lowpart_offset (inner_mode, outer_mode);
987 /* Generate a REG rtx for a new pseudo register of mode MODE.
988 This pseudo is assigned the next sequential register number. */
991 gen_reg_rtx (enum machine_mode mode)
993 rtx val;
994 unsigned int align = GET_MODE_ALIGNMENT (mode);
996 gcc_assert (can_create_pseudo_p ());
998 /* If a virtual register with bigger mode alignment is generated,
999 increase stack alignment estimation because it might be spilled
1000 to stack later. */
1001 if (SUPPORTS_STACK_ALIGNMENT
1002 && crtl->stack_alignment_estimated < align
1003 && !crtl->stack_realign_processed)
1005 unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
1006 if (crtl->stack_alignment_estimated < min_align)
1007 crtl->stack_alignment_estimated = min_align;
1010 if (generating_concat_p
1011 && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
1012 || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
1014 /* For complex modes, don't make a single pseudo.
1015 Instead, make a CONCAT of two pseudos.
1016 This allows noncontiguous allocation of the real and imaginary parts,
1017 which makes much better code. Besides, allocating DCmode
1018 pseudos overstrains reload on some machines like the 386. */
1019 rtx realpart, imagpart;
1020 enum machine_mode partmode = GET_MODE_INNER (mode);
1022 realpart = gen_reg_rtx (partmode);
1023 imagpart = gen_reg_rtx (partmode);
1024 return gen_rtx_CONCAT (mode, realpart, imagpart);
1027 /* Do not call gen_reg_rtx with uninitialized crtl. */
1028 gcc_assert (crtl->emit.regno_pointer_align_length);
1030 /* Make sure regno_pointer_align, and regno_reg_rtx are large
1031 enough to have an element for this pseudo reg number. */
1033 if (reg_rtx_no == crtl->emit.regno_pointer_align_length)
1035 int old_size = crtl->emit.regno_pointer_align_length;
1036 char *tmp;
1037 rtx *new1;
1039 tmp = XRESIZEVEC (char, crtl->emit.regno_pointer_align, old_size * 2);
1040 memset (tmp + old_size, 0, old_size);
1041 crtl->emit.regno_pointer_align = (unsigned char *) tmp;
1043 new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, old_size * 2);
1044 memset (new1 + old_size, 0, old_size * sizeof (rtx));
1045 regno_reg_rtx = new1;
1047 crtl->emit.regno_pointer_align_length = old_size * 2;
1050 val = gen_raw_REG (mode, reg_rtx_no);
1051 regno_reg_rtx[reg_rtx_no++] = val;
1052 return val;
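/* Two quick examples.  An ordinary scalar request returns a fresh
   pseudo; a complex-mode request made while generating_concat_p is set
   returns a CONCAT of two independent pseudos for the real and
   imaginary parts:

     rtx tmp = gen_reg_rtx (SImode);
     gcc_assert (REGNO (tmp) >= FIRST_PSEUDO_REGISTER);

     rtx z = gen_reg_rtx (DCmode);
     gcc_assert (GET_CODE (z) == CONCAT);
*/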
1055 /* Return TRUE if REG is a PARM_DECL, FALSE otherwise. */
1057 bool
1058 reg_is_parm_p (rtx reg)
1060 tree decl;
1062 gcc_assert (REG_P (reg));
1063 decl = REG_EXPR (reg);
1064 return (decl && TREE_CODE (decl) == PARM_DECL);
1067 /* Update NEW with the same attributes as REG, but with OFFSET added
1068 to the REG_OFFSET. */
1070 static void
1071 update_reg_offset (rtx new_rtx, rtx reg, int offset)
1073 REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
1074 REG_OFFSET (reg) + offset);
1077 /* Generate a register with same attributes as REG, but with OFFSET
1078 added to the REG_OFFSET. */
1081 gen_rtx_REG_offset (rtx reg, enum machine_mode mode, unsigned int regno,
1082 int offset)
1084 rtx new_rtx = gen_rtx_REG (mode, regno);
1086 update_reg_offset (new_rtx, reg, offset);
1087 return new_rtx;
1090 /* Generate a new pseudo-register with the same attributes as REG, but
1091 with OFFSET added to the REG_OFFSET. */
1094 gen_reg_rtx_offset (rtx reg, enum machine_mode mode, int offset)
1096 rtx new_rtx = gen_reg_rtx (mode);
1098 update_reg_offset (new_rtx, reg, offset);
1099 return new_rtx;
1102 /* Adjust REG in-place so that it has mode MODE. It is assumed that the
1103 new register is a (possibly paradoxical) lowpart of the old one. */
1105 void
1106 adjust_reg_mode (rtx reg, enum machine_mode mode)
1108 update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
1109 PUT_MODE (reg, mode);
1112 /* Copy REG's attributes from X, if X has any attributes. If REG and X
1113 have different modes, REG is a (possibly paradoxical) lowpart of X. */
1115 void
1116 set_reg_attrs_from_value (rtx reg, rtx x)
1118 int offset;
1119 bool can_be_reg_pointer = true;
1121 /* Don't call mark_reg_pointer for incompatible pointer sign
1122 extension. */
1123 while (GET_CODE (x) == SIGN_EXTEND
1124 || GET_CODE (x) == ZERO_EXTEND
1125 || GET_CODE (x) == TRUNCATE
1126 || (GET_CODE (x) == SUBREG && subreg_lowpart_p (x)))
1128 #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
1129 if ((GET_CODE (x) == SIGN_EXTEND && POINTERS_EXTEND_UNSIGNED)
1130 || (GET_CODE (x) != SIGN_EXTEND && ! POINTERS_EXTEND_UNSIGNED))
1131 can_be_reg_pointer = false;
1132 #endif
1133 x = XEXP (x, 0);
1136 /* Hard registers can be reused for multiple purposes within the same
1137 function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
1138 on them is wrong. */
1139 if (HARD_REGISTER_P (reg))
1140 return;
1142 offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
1143 if (MEM_P (x))
1145 if (MEM_OFFSET_KNOWN_P (x))
1146 REG_ATTRS (reg) = get_reg_attrs (MEM_EXPR (x),
1147 MEM_OFFSET (x) + offset);
1148 if (can_be_reg_pointer && MEM_POINTER (x))
1149 mark_reg_pointer (reg, 0);
1151 else if (REG_P (x))
1153 if (REG_ATTRS (x))
1154 update_reg_offset (reg, x, offset);
1155 if (can_be_reg_pointer && REG_POINTER (x))
1156 mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
1160 /* Generate a REG rtx for a new pseudo register, copying the mode
1161 and attributes from X. */
1164 gen_reg_rtx_and_attrs (rtx x)
1166 rtx reg = gen_reg_rtx (GET_MODE (x));
1167 set_reg_attrs_from_value (reg, x);
1168 return reg;
1171 /* Set the register attributes for registers contained in PARM_RTX.
1172 Use needed values from memory attributes of MEM. */
1174 void
1175 set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
1177 if (REG_P (parm_rtx))
1178 set_reg_attrs_from_value (parm_rtx, mem);
1179 else if (GET_CODE (parm_rtx) == PARALLEL)
1181 /* Check for a NULL entry in the first slot, used to indicate that the
1182 parameter goes both on the stack and in registers. */
1183 int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
1184 for (; i < XVECLEN (parm_rtx, 0); i++)
1186 rtx x = XVECEXP (parm_rtx, 0, i);
1187 if (REG_P (XEXP (x, 0)))
1188 REG_ATTRS (XEXP (x, 0))
1189 = get_reg_attrs (MEM_EXPR (mem),
1190 INTVAL (XEXP (x, 1)));
1195 /* Set the REG_ATTRS for registers in value X, given that X represents
1196 decl T. */
1198 void
1199 set_reg_attrs_for_decl_rtl (tree t, rtx x)
1201 if (GET_CODE (x) == SUBREG)
1203 gcc_assert (subreg_lowpart_p (x));
1204 x = SUBREG_REG (x);
1206 if (REG_P (x))
1207 REG_ATTRS (x)
1208 = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
1209 DECL_MODE (t)));
1210 if (GET_CODE (x) == CONCAT)
1212 if (REG_P (XEXP (x, 0)))
1213 REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
1214 if (REG_P (XEXP (x, 1)))
1215 REG_ATTRS (XEXP (x, 1))
1216 = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
1218 if (GET_CODE (x) == PARALLEL)
1220 int i, start;
1222 /* Check for a NULL entry, used to indicate that the parameter goes
1223 both on the stack and in registers. */
1224 if (XEXP (XVECEXP (x, 0, 0), 0))
1225 start = 0;
1226 else
1227 start = 1;
1229 for (i = start; i < XVECLEN (x, 0); i++)
1231 rtx y = XVECEXP (x, 0, i);
1232 if (REG_P (XEXP (y, 0)))
1233 REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
1238 /* Assign the RTX X to declaration T. */
1240 void
1241 set_decl_rtl (tree t, rtx x)
1243 DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
1244 if (x)
1245 set_reg_attrs_for_decl_rtl (t, x);
1248 /* Assign the RTX X to parameter declaration T. BY_REFERENCE_P is true
1249 if the ABI requires the parameter to be passed by reference. */
1251 void
1252 set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
1254 DECL_INCOMING_RTL (t) = x;
1255 if (x && !by_reference_p)
1256 set_reg_attrs_for_decl_rtl (t, x);
1259 /* Identify REG (which may be a CONCAT) as a user register. */
1261 void
1262 mark_user_reg (rtx reg)
1264 if (GET_CODE (reg) == CONCAT)
1266 REG_USERVAR_P (XEXP (reg, 0)) = 1;
1267 REG_USERVAR_P (XEXP (reg, 1)) = 1;
1269 else
1271 gcc_assert (REG_P (reg));
1272 REG_USERVAR_P (reg) = 1;
1276 /* Identify REG as a probable pointer register and show its alignment
1277 as ALIGN, if nonzero. */
1279 void
1280 mark_reg_pointer (rtx reg, int align)
1282 if (! REG_POINTER (reg))
1284 REG_POINTER (reg) = 1;
1286 if (align)
1287 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1289 else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
1290 /* We can no longer be sure just how aligned this pointer is. */
1291 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1294 /* Return 1 plus largest pseudo reg number used in the current function. */
1297 max_reg_num (void)
1299 return reg_rtx_no;
1302 /* Return 1 + the largest label number used so far in the current function. */
1305 max_label_num (void)
1307 return label_num;
1310 /* Return first label number used in this function (if any were used). */
1313 get_first_label_num (void)
1315 return first_label_num;
1318 /* If the rtx for label was created during the expansion of a nested
1319 function, then first_label_num won't include this label number.
1320 Fix this now so that array indices work later. */
1322 void
1323 maybe_set_first_label_num (rtx x)
1325 if (CODE_LABEL_NUMBER (x) < first_label_num)
1326 first_label_num = CODE_LABEL_NUMBER (x);
1329 /* Return a value representing some low-order bits of X, where the number
1330 of low-order bits is given by MODE. Note that no conversion is done
1331 between floating-point and fixed-point values, rather, the bit
1332 representation is returned.
1334 This function handles the cases in common between gen_lowpart, below,
1335 and two variants in cse.c and combine.c. These are the cases that can
1336 be safely handled at all points in the compilation.
1338 If this is not a case we can handle, return 0. */
1341 gen_lowpart_common (enum machine_mode mode, rtx x)
1343 int msize = GET_MODE_SIZE (mode);
1344 int xsize;
1345 int offset = 0;
1346 enum machine_mode innermode;
1348 /* Unfortunately, this routine doesn't take a parameter for the mode of X,
1349 so we have to make one up. Yuk. */
1350 innermode = GET_MODE (x);
1351 if (CONST_INT_P (x)
1352 && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
1353 innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
1354 else if (innermode == VOIDmode)
1355 innermode = mode_for_size (HOST_BITS_PER_DOUBLE_INT, MODE_INT, 0);
1357 xsize = GET_MODE_SIZE (innermode);
1359 gcc_assert (innermode != VOIDmode && innermode != BLKmode);
1361 if (innermode == mode)
1362 return x;
1364 /* MODE must occupy no more words than the mode of X. */
1365 if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
1366 > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
1367 return 0;
1369 /* Don't allow generating paradoxical FLOAT_MODE subregs. */
1370 if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
1371 return 0;
1373 offset = subreg_lowpart_offset (mode, innermode);
1375 if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
1376 && (GET_MODE_CLASS (mode) == MODE_INT
1377 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
1379 /* If we are getting the low-order part of something that has been
1380 sign- or zero-extended, we can either just use the object being
1381 extended or make a narrower extension. If we want an even smaller
1382 piece than the size of the object being extended, call ourselves
1383 recursively.
1385 This case is used mostly by combine and cse. */
1387 if (GET_MODE (XEXP (x, 0)) == mode)
1388 return XEXP (x, 0);
1389 else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
1390 return gen_lowpart_common (mode, XEXP (x, 0));
1391 else if (msize < xsize)
1392 return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
1394 else if (GET_CODE (x) == SUBREG || REG_P (x)
1395 || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
1396 || CONST_DOUBLE_AS_FLOAT_P (x) || CONST_SCALAR_INT_P (x))
1397 return simplify_gen_subreg (mode, x, innermode, offset);
1399 /* Otherwise, we can't do this. */
1400 return 0;
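/* A short sketch of the extension case handled above: the low SImode
   part of a zero-extended SImode value is just the original operand:

     rtx r = gen_reg_rtx (SImode);
     rtx x = gen_rtx_ZERO_EXTEND (DImode, r);
     gcc_assert (gen_lowpart_common (SImode, x) == r);
*/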
1404 gen_highpart (enum machine_mode mode, rtx x)
1406 unsigned int msize = GET_MODE_SIZE (mode);
1407 rtx result;
1409 /* This case loses if X is a subreg. To catch bugs early,
1410 complain if an invalid MODE is used even in other cases. */
1411 gcc_assert (msize <= UNITS_PER_WORD
1412 || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));
1414 result = simplify_gen_subreg (mode, x, GET_MODE (x),
1415 subreg_highpart_offset (mode, GET_MODE (x)));
1416 gcc_assert (result);
1418 /* simplify_gen_subreg is not guaranteed to return a valid operand for
1419 the target if we have a MEM. gen_highpart must return a valid operand,
1420 emitting code if necessary to do so. */
1421 if (MEM_P (result))
1423 result = validize_mem (result);
1424 gcc_assert (result);
1427 return result;
1430 /* Like gen_highpart, but accept the mode of the EXP operand, in case
1431 EXP can be a VOIDmode constant. */
1433 gen_highpart_mode (enum machine_mode outermode, enum machine_mode innermode, rtx exp)
1435 if (GET_MODE (exp) != VOIDmode)
1437 gcc_assert (GET_MODE (exp) == innermode);
1438 return gen_highpart (outermode, exp);
1440 return simplify_gen_subreg (outermode, exp, innermode,
1441 subreg_highpart_offset (outermode, innermode));
1444 /* Return the SUBREG_BYTE for an OUTERMODE lowpart of an INNERMODE value. */
1446 unsigned int
1447 subreg_lowpart_offset (enum machine_mode outermode, enum machine_mode innermode)
1449 unsigned int offset = 0;
1450 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1452 if (difference > 0)
1454 if (WORDS_BIG_ENDIAN)
1455 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1456 if (BYTES_BIG_ENDIAN)
1457 offset += difference % UNITS_PER_WORD;
1460 return offset;
1463 /* Return offset in bytes to get OUTERMODE high part
1464 of the value in mode INNERMODE stored in memory in target format. */
1465 unsigned int
1466 subreg_highpart_offset (enum machine_mode outermode, enum machine_mode innermode)
1468 unsigned int offset = 0;
1469 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1471 gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));
1473 if (difference > 0)
1475 if (! WORDS_BIG_ENDIAN)
1476 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1477 if (! BYTES_BIG_ENDIAN)
1478 offset += difference % UNITS_PER_WORD;
1481 return offset;
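/* Whatever the endianness, the two offsets partition the size difference
   between the modes.  For SImode within DImode:

     unsigned int lo = subreg_lowpart_offset (SImode, DImode);
     unsigned int hi = subreg_highpart_offset (SImode, DImode);
     gcc_assert (lo + hi
                 == GET_MODE_SIZE (DImode) - GET_MODE_SIZE (SImode));

   On a little-endian target lo is 0 and hi is 4; on a big-endian target
   the two are swapped.  */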
1484 /* Return 1 iff X, assumed to be a SUBREG,
1485 refers to the least significant part of its containing reg.
1486 If X is not a SUBREG, always return 1 (it is its own low part!). */
1489 subreg_lowpart_p (const_rtx x)
1491 if (GET_CODE (x) != SUBREG)
1492 return 1;
1493 else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
1494 return 0;
1496 return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
1497 == SUBREG_BYTE (x));
1500 /* Return true if X is a paradoxical subreg, false otherwise. */
1501 bool
1502 paradoxical_subreg_p (const_rtx x)
1504 if (GET_CODE (x) != SUBREG)
1505 return false;
1506 return (GET_MODE_PRECISION (GET_MODE (x))
1507 > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (x))));
1510 /* Return subword OFFSET of operand OP.
1511 The word number, OFFSET, is interpreted as the word number starting
1512 at the low-order address. OFFSET 0 is the low-order word if not
1513 WORDS_BIG_ENDIAN, otherwise it is the high-order word.
1515 If we cannot extract the required word, we return zero. Otherwise,
1516 an rtx corresponding to the requested word will be returned.
1518 VALIDATE_ADDRESS is nonzero if the address should be validated. Before
1519 reload has completed, a valid address will always be returned. After
1520 reload, if a valid address cannot be returned, we return zero.
1522 If VALIDATE_ADDRESS is zero, we simply form the required address; validating
1523 it is the responsibility of the caller.
1525 MODE is the mode of OP in case it is a CONST_INT.
1527 ??? This is still rather broken for some cases. The problem for the
1528 moment is that all callers of this thing provide no 'goal mode' to
1529 tell us to work with. This exists because all callers were written
1530 in a word based SUBREG world.
1531 Now use of this function can be deprecated by simplify_subreg in most
1532 cases.
1536 operand_subword (rtx op, unsigned int offset, int validate_address, enum machine_mode mode)
1538 if (mode == VOIDmode)
1539 mode = GET_MODE (op);
1541 gcc_assert (mode != VOIDmode);
1543 /* If OP is narrower than a word, fail. */
1544 if (mode != BLKmode
1545 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
1546 return 0;
1548 /* If we want a word outside OP, return zero. */
1549 if (mode != BLKmode
1550 && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
1551 return const0_rtx;
1553 /* Form a new MEM at the requested address. */
1554 if (MEM_P (op))
1556 rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);
1558 if (! validate_address)
1559 return new_rtx;
1561 else if (reload_completed)
1563 if (! strict_memory_address_addr_space_p (word_mode,
1564 XEXP (new_rtx, 0),
1565 MEM_ADDR_SPACE (op)))
1566 return 0;
1568 else
1569 return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
1572 /* Rest can be handled by simplify_subreg. */
1573 return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
1576 /* Similar to `operand_subword', but never return 0. If we can't
1577 extract the required subword, put OP into a register and try again.
1578 The second attempt must succeed. We always validate the address in
1579 this case.
1581 MODE is the mode of OP, in case it is CONST_INT. */
1584 operand_subword_force (rtx op, unsigned int offset, enum machine_mode mode)
1586 rtx result = operand_subword (op, offset, 1, mode);
1588 if (result)
1589 return result;
1591 if (mode != BLKmode && mode != VOIDmode)
1593 /* If this is a register which can not be accessed by words, copy it
1594 to a pseudo register. */
1595 if (REG_P (op))
1596 op = copy_to_reg (op);
1597 else
1598 op = force_reg (mode, op);
1601 result = operand_subword (op, offset, 1, mode);
1602 gcc_assert (result);
1604 return result;
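/* A minimal sketch: extract word 1 of a DImode value on a target whose
   word size is 32 bits (OP stands for any DImode rtx and is hypothetical
   here).  If OP cannot be subworded directly it is first forced into a
   register, so the result is never zero:

     rtx hi_word = operand_subword_force (op, 1, DImode);
*/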
1607 /* Returns 1 if the two MEM_EXPRs can be considered equal,
1608 and 0 otherwise. */
1611 mem_expr_equal_p (const_tree expr1, const_tree expr2)
1613 if (expr1 == expr2)
1614 return 1;
1616 if (! expr1 || ! expr2)
1617 return 0;
1619 if (TREE_CODE (expr1) != TREE_CODE (expr2))
1620 return 0;
1622 return operand_equal_p (expr1, expr2, 0);
1625 /* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
1626 bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
1627 -1 if not known. */
1630 get_mem_align_offset (rtx mem, unsigned int align)
1632 tree expr;
1633 unsigned HOST_WIDE_INT offset;
1635 /* This function can't use
1636 if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem)
1637 || (MAX (MEM_ALIGN (mem),
1638 MAX (align, get_object_alignment (MEM_EXPR (mem))))
1639 < align))
1640 return -1;
1641 else
1642 return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1);
1643 for two reasons:
1644 - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
1645 for <variable>. get_inner_reference doesn't handle it and
1646 even if it did, the alignment in that case needs to be determined
1647 from DECL_FIELD_CONTEXT's TYPE_ALIGN.
1648 - it would do suboptimal job for COMPONENT_REFs, even if MEM_EXPR
1649 isn't sufficiently aligned, the object it is in might be. */
1650 gcc_assert (MEM_P (mem));
1651 expr = MEM_EXPR (mem);
1652 if (expr == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
1653 return -1;
1655 offset = MEM_OFFSET (mem);
1656 if (DECL_P (expr))
1658 if (DECL_ALIGN (expr) < align)
1659 return -1;
1661 else if (INDIRECT_REF_P (expr))
1663 if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
1664 return -1;
1666 else if (TREE_CODE (expr) == COMPONENT_REF)
1668 while (1)
1670 tree inner = TREE_OPERAND (expr, 0);
1671 tree field = TREE_OPERAND (expr, 1);
1672 tree byte_offset = component_ref_field_offset (expr);
1673 tree bit_offset = DECL_FIELD_BIT_OFFSET (field);
1675 if (!byte_offset
1676 || !tree_fits_uhwi_p (byte_offset)
1677 || !tree_fits_uhwi_p (bit_offset))
1678 return -1;
1680 offset += tree_to_uhwi (byte_offset);
1681 offset += tree_to_uhwi (bit_offset) / BITS_PER_UNIT;
1683 if (inner == NULL_TREE)
1685 if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
1686 < (unsigned int) align)
1687 return -1;
1688 break;
1690 else if (DECL_P (inner))
1692 if (DECL_ALIGN (inner) < align)
1693 return -1;
1694 break;
1696 else if (TREE_CODE (inner) != COMPONENT_REF)
1697 return -1;
1698 expr = inner;
1701 else
1702 return -1;
1704 return offset & ((align / BITS_PER_UNIT) - 1);
1707 /* Given REF (a MEM) and T, either the type of X or the expression
1708 corresponding to REF, set the memory attributes. OBJECTP is nonzero
1709 if we are making a new object of this type. BITPOS is nonzero if
1710 there is an offset outstanding on T that will be applied later. */
1712 void
1713 set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
1714 HOST_WIDE_INT bitpos)
1716 HOST_WIDE_INT apply_bitpos = 0;
1717 tree type;
1718 struct mem_attrs attrs, *defattrs, *refattrs;
1719 addr_space_t as;
1721 /* It can happen that type_for_mode was given a mode for which there
1722 is no language-level type, in which case it returns NULL, which
1723 we can see here. */
1724 if (t == NULL_TREE)
1725 return;
1727 type = TYPE_P (t) ? t : TREE_TYPE (t);
1728 if (type == error_mark_node)
1729 return;
1731 /* If we have already set DECL_RTL = ref, get_alias_set will get the
1732 wrong answer, as it assumes that DECL_RTL already has the right alias
1733 info. Callers should not set DECL_RTL until after the call to
1734 set_mem_attributes. */
1735 gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));
1737 memset (&attrs, 0, sizeof (attrs));
1739 /* Get the alias set from the expression or type (perhaps using a
1740 front-end routine) and use it. */
1741 attrs.alias = get_alias_set (t);
1743 MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
1744 MEM_POINTER (ref) = POINTER_TYPE_P (type);
1746 /* Default values from pre-existing memory attributes if present. */
1747 refattrs = MEM_ATTRS (ref);
1748 if (refattrs)
1750 /* ??? Can this ever happen? Calling this routine on a MEM that
1751 already carries memory attributes should probably be invalid. */
1752 attrs.expr = refattrs->expr;
1753 attrs.offset_known_p = refattrs->offset_known_p;
1754 attrs.offset = refattrs->offset;
1755 attrs.size_known_p = refattrs->size_known_p;
1756 attrs.size = refattrs->size;
1757 attrs.align = refattrs->align;
1760 /* Otherwise, default values from the mode of the MEM reference. */
1761 else
1763 defattrs = mode_mem_attrs[(int) GET_MODE (ref)];
1764 gcc_assert (!defattrs->expr);
1765 gcc_assert (!defattrs->offset_known_p);
1767 /* Respect mode size. */
1768 attrs.size_known_p = defattrs->size_known_p;
1769 attrs.size = defattrs->size;
1770 /* ??? Is this really necessary? We probably should always get
1771 the size from the type below. */
1773 /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
1774 if T is an object, always compute the object alignment below. */
1775 if (TYPE_P (t))
1776 attrs.align = defattrs->align;
1777 else
1778 attrs.align = BITS_PER_UNIT;
1779 /* ??? If T is a type, respecting mode alignment may *also* be wrong
1780 e.g. if the type carries an alignment attribute. Should we be
1781 able to simply always use TYPE_ALIGN? */
1784 /* We can set the alignment from the type if we are making an object,
1785 this is an INDIRECT_REF, or if TYPE_ALIGN_OK. */
1786 if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
1787 attrs.align = MAX (attrs.align, TYPE_ALIGN (type));
1789 /* If the size is known, we can set that. */
1790 tree new_size = TYPE_SIZE_UNIT (type);
1792 /* The address-space is that of the type. */
1793 as = TYPE_ADDR_SPACE (type);
1795 /* If T is not a type, we may be able to deduce some more information about
1796 the expression. */
1797 if (! TYPE_P (t))
1799 tree base;
1801 if (TREE_THIS_VOLATILE (t))
1802 MEM_VOLATILE_P (ref) = 1;
1804 /* Now remove any conversions: they don't change what the underlying
1805 object is. Likewise for SAVE_EXPR. */
1806 while (CONVERT_EXPR_P (t)
1807 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1808 || TREE_CODE (t) == SAVE_EXPR)
1809 t = TREE_OPERAND (t, 0);
1811 /* Note whether this expression can trap. */
1812 MEM_NOTRAP_P (ref) = !tree_could_trap_p (t);
1814 base = get_base_address (t);
1815 if (base)
1817 if (DECL_P (base)
1818 && TREE_READONLY (base)
1819 && (TREE_STATIC (base) || DECL_EXTERNAL (base))
1820 && !TREE_THIS_VOLATILE (base))
1821 MEM_READONLY_P (ref) = 1;
1823 /* Mark static const strings readonly as well. */
1824 if (TREE_CODE (base) == STRING_CST
1825 && TREE_READONLY (base)
1826 && TREE_STATIC (base))
1827 MEM_READONLY_P (ref) = 1;
1829 /* Address-space information is on the base object. */
1830 if (TREE_CODE (base) == MEM_REF
1831 || TREE_CODE (base) == TARGET_MEM_REF)
1832 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (base,
1833 0))));
1834 else
1835 as = TYPE_ADDR_SPACE (TREE_TYPE (base));
1838 /* If this expression uses its parent's alias set, mark it such
1839 that we won't change it. */
1840 if (component_uses_parent_alias_set_from (t) != NULL_TREE)
1841 MEM_KEEP_ALIAS_SET_P (ref) = 1;
1843 /* If this is a decl, set the attributes of the MEM from it. */
1844 if (DECL_P (t))
1846 attrs.expr = t;
1847 attrs.offset_known_p = true;
1848 attrs.offset = 0;
1849 apply_bitpos = bitpos;
1850 new_size = DECL_SIZE_UNIT (t);
1853 /* ??? If we end up with a constant here do record a MEM_EXPR. */
1854 else if (CONSTANT_CLASS_P (t))
1857 /* If this is a field reference, record it. */
1858 else if (TREE_CODE (t) == COMPONENT_REF)
1860 attrs.expr = t;
1861 attrs.offset_known_p = true;
1862 attrs.offset = 0;
1863 apply_bitpos = bitpos;
1864 if (DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
1865 new_size = DECL_SIZE_UNIT (TREE_OPERAND (t, 1));
1868 /* If this is an array reference, look for an outer field reference. */
1869 else if (TREE_CODE (t) == ARRAY_REF)
1871 tree off_tree = size_zero_node;
1872 /* We can't modify t, because we use it at the end of the
1873 function. */
1874 tree t2 = t;
1878 tree index = TREE_OPERAND (t2, 1);
1879 tree low_bound = array_ref_low_bound (t2);
1880 tree unit_size = array_ref_element_size (t2);
1882 /* We assume all arrays have sizes that are a multiple of a byte.
1883 First subtract the lower bound, if any, in the type of the
1884 index, then convert to sizetype and multiply by the size of
1885 the array element. */
1886 if (! integer_zerop (low_bound))
1887 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
1888 index, low_bound);
1890 off_tree = size_binop (PLUS_EXPR,
1891 size_binop (MULT_EXPR,
1892 fold_convert (sizetype,
1893 index),
1894 unit_size),
1895 off_tree);
1896 t2 = TREE_OPERAND (t2, 0);
1898 while (TREE_CODE (t2) == ARRAY_REF);
1900 if (DECL_P (t2)
1901 || TREE_CODE (t2) == COMPONENT_REF)
1903 attrs.expr = t2;
1904 attrs.offset_known_p = false;
1905 if (tree_fits_uhwi_p (off_tree))
1907 attrs.offset_known_p = true;
1908 attrs.offset = tree_to_uhwi (off_tree);
1909 apply_bitpos = bitpos;
1912 /* Else do not record a MEM_EXPR. */
1915 /* If this is an indirect reference, record it. */
1916 else if (TREE_CODE (t) == MEM_REF
1917 || TREE_CODE (t) == TARGET_MEM_REF)
1919 attrs.expr = t;
1920 attrs.offset_known_p = true;
1921 attrs.offset = 0;
1922 apply_bitpos = bitpos;
1925 /* Compute the alignment. */
1926 unsigned int obj_align;
1927 unsigned HOST_WIDE_INT obj_bitpos;
1928 get_object_alignment_1 (t, &obj_align, &obj_bitpos);
1929 obj_bitpos = (obj_bitpos - bitpos) & (obj_align - 1);
1930 if (obj_bitpos != 0)
1931 obj_align = (obj_bitpos & -obj_bitpos);
1932 attrs.align = MAX (attrs.align, obj_align);
1935 if (tree_fits_uhwi_p (new_size))
1937 attrs.size_known_p = true;
1938 attrs.size = tree_to_uhwi (new_size);
1941 /* If we modified OFFSET based on T, then subtract the outstanding
1942 bit position offset. Similarly, increase the size of the accessed
1943 object to contain the negative offset. */
1944 if (apply_bitpos)
1946 gcc_assert (attrs.offset_known_p);
1947 attrs.offset -= apply_bitpos / BITS_PER_UNIT;
1948 if (attrs.size_known_p)
1949 attrs.size += apply_bitpos / BITS_PER_UNIT;
1952 /* Now set the attributes we computed above. */
1953 attrs.addrspace = as;
1954 set_mem_attrs (ref, &attrs);
1957 void
1958 set_mem_attributes (rtx ref, tree t, int objectp)
1960 set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
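/* Usage sketch (illustrative only, not part of the original file):
   expanders that build a MEM for a tree expression EXP typically
   attach the tree's attributes right after creating the MEM, e.g.

       rtx mem = gen_rtx_MEM (mode, addr);
       set_mem_attributes (mem, exp, 0);

   so that MEM_EXPR, MEM_ALIGN, MEM_SIZE and the alias set reflect EXP.
   Here mode, addr and exp are placeholder names.  */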
1963 /* Set the alias set of MEM to SET. */
1965 void
1966 set_mem_alias_set (rtx mem, alias_set_type set)
1968 struct mem_attrs attrs;
1970 /* If the new and old alias sets don't conflict, something is wrong. */
1971 gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
1972 attrs = *get_mem_attrs (mem);
1973 attrs.alias = set;
1974 set_mem_attrs (mem, &attrs);
1977 /* Set the address space of MEM to ADDRSPACE (target-defined). */
1979 void
1980 set_mem_addr_space (rtx mem, addr_space_t addrspace)
1982 struct mem_attrs attrs;
1984 attrs = *get_mem_attrs (mem);
1985 attrs.addrspace = addrspace;
1986 set_mem_attrs (mem, &attrs);
1989 /* Set the alignment of MEM to ALIGN bits. */
1991 void
1992 set_mem_align (rtx mem, unsigned int align)
1994 struct mem_attrs attrs;
1996 attrs = *get_mem_attrs (mem);
1997 attrs.align = align;
1998 set_mem_attrs (mem, &attrs);
2001 /* Set the expr for MEM to EXPR. */
2003 void
2004 set_mem_expr (rtx mem, tree expr)
2006 struct mem_attrs attrs;
2008 attrs = *get_mem_attrs (mem);
2009 attrs.expr = expr;
2010 set_mem_attrs (mem, &attrs);
2013 /* Set the offset of MEM to OFFSET. */
2015 void
2016 set_mem_offset (rtx mem, HOST_WIDE_INT offset)
2018 struct mem_attrs attrs;
2020 attrs = *get_mem_attrs (mem);
2021 attrs.offset_known_p = true;
2022 attrs.offset = offset;
2023 set_mem_attrs (mem, &attrs);
2026 /* Clear the offset of MEM. */
2028 void
2029 clear_mem_offset (rtx mem)
2031 struct mem_attrs attrs;
2033 attrs = *get_mem_attrs (mem);
2034 attrs.offset_known_p = false;
2035 set_mem_attrs (mem, &attrs);
2038 /* Set the size of MEM to SIZE. */
2040 void
2041 set_mem_size (rtx mem, HOST_WIDE_INT size)
2043 struct mem_attrs attrs;
2045 attrs = *get_mem_attrs (mem);
2046 attrs.size_known_p = true;
2047 attrs.size = size;
2048 set_mem_attrs (mem, &attrs);
2051 /* Clear the size of MEM. */
2053 void
2054 clear_mem_size (rtx mem)
2056 struct mem_attrs attrs;
2058 attrs = *get_mem_attrs (mem);
2059 attrs.size_known_p = false;
2060 set_mem_attrs (mem, &attrs);
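/* A minimal sketch (illustrative only) of how the setters above are
   typically used: each one copies the shared mem_attrs structure,
   changes a single field and re-installs the result.  For an existing
   MEM rtx, a caller might write

       set_mem_align (mem, 32);
       set_mem_size (mem, 16);

   where the alignment is given in bits and the size in bytes.  */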
2063 /* Return a memory reference like MEMREF, but with its mode changed to MODE
2064 and its address changed to ADDR. (VOIDmode means don't change the mode.
2065 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
2066 returned memory location is required to be valid. INPLACE is true if any
2067 changes can be made directly to MEMREF or false if MEMREF must be treated
2068 as immutable.
2070 The memory attributes are not changed. */
2072 static rtx
2073 change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate,
2074 bool inplace)
2076 addr_space_t as;
2077 rtx new_rtx;
2079 gcc_assert (MEM_P (memref));
2080 as = MEM_ADDR_SPACE (memref);
2081 if (mode == VOIDmode)
2082 mode = GET_MODE (memref);
2083 if (addr == 0)
2084 addr = XEXP (memref, 0);
2085 if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
2086 && (!validate || memory_address_addr_space_p (mode, addr, as)))
2087 return memref;
2089 /* Don't validate the address for LRA. LRA can make the address valid
2090 by itself in the most efficient way. */
2091 if (validate && !lra_in_progress)
2093 if (reload_in_progress || reload_completed)
2094 gcc_assert (memory_address_addr_space_p (mode, addr, as));
2095 else
2096 addr = memory_address_addr_space (mode, addr, as);
2099 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
2100 return memref;
2102 if (inplace)
2104 XEXP (memref, 0) = addr;
2105 return memref;
2108 new_rtx = gen_rtx_MEM (mode, addr);
2109 MEM_COPY_ATTRIBUTES (new_rtx, memref);
2110 return new_rtx;
2113 /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
2114 way we are changing MEMREF, so we only preserve the alias set. */
2117 change_address (rtx memref, enum machine_mode mode, rtx addr)
2119 rtx new_rtx = change_address_1 (memref, mode, addr, 1, false);
2120 enum machine_mode mmode = GET_MODE (new_rtx);
2121 struct mem_attrs attrs, *defattrs;
2123 attrs = *get_mem_attrs (memref);
2124 defattrs = mode_mem_attrs[(int) mmode];
2125 attrs.expr = NULL_TREE;
2126 attrs.offset_known_p = false;
2127 attrs.size_known_p = defattrs->size_known_p;
2128 attrs.size = defattrs->size;
2129 attrs.align = defattrs->align;
2131 /* If there are no changes, just return the original memory reference. */
2132 if (new_rtx == memref)
2134 if (mem_attrs_eq_p (get_mem_attrs (memref), &attrs))
2135 return new_rtx;
2137 new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
2138 MEM_COPY_ATTRIBUTES (new_rtx, memref);
2141 set_mem_attrs (new_rtx, &attrs);
2142 return new_rtx;
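/* Usage sketch for change_address (illustrative, placeholder names):
   give an existing MEM a new mode and address while keeping only its
   alias set, e.g.

       rtx new_mem = change_address (old_mem, SImode,
                                     force_reg (Pmode, new_addr));

   Unlike adjust_address_1 below, no relationship between the old and
   new addresses is claimed, so the expr, offset and size are dropped.  */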
2145 /* Return a memory reference like MEMREF, but with its mode changed
2146 to MODE and its address offset by OFFSET bytes. If VALIDATE is
2147 nonzero, the memory address is forced to be valid.
2148 If ADJUST_ADDRESS is zero, OFFSET is only used to update MEM_ATTRS
2149 and the caller is responsible for adjusting the MEMREF base register.
2150 If ADJUST_OBJECT is zero, the underlying object associated with the
2151 memory reference is left unchanged and the caller is responsible for
2152 dealing with it. Otherwise, if the new memory reference is outside
2153 the underlying object, even partially, then the object is dropped.
2154 SIZE, if nonzero, is the size of an access in cases where MODE
2155 has no inherent size. */
2158 adjust_address_1 (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset,
2159 int validate, int adjust_address, int adjust_object,
2160 HOST_WIDE_INT size)
2162 rtx addr = XEXP (memref, 0);
2163 rtx new_rtx;
2164 enum machine_mode address_mode;
2165 int pbits;
2166 struct mem_attrs attrs = *get_mem_attrs (memref), *defattrs;
2167 unsigned HOST_WIDE_INT max_align;
2168 #ifdef POINTERS_EXTEND_UNSIGNED
2169 enum machine_mode pointer_mode
2170 = targetm.addr_space.pointer_mode (attrs.addrspace);
2171 #endif
2173 /* VOIDmode means no mode change for change_address_1. */
2174 if (mode == VOIDmode)
2175 mode = GET_MODE (memref);
2177 /* Take the size of non-BLKmode accesses from the mode. */
2178 defattrs = mode_mem_attrs[(int) mode];
2179 if (defattrs->size_known_p)
2180 size = defattrs->size;
2182 /* If there are no changes, just return the original memory reference. */
2183 if (mode == GET_MODE (memref) && !offset
2184 && (size == 0 || (attrs.size_known_p && attrs.size == size))
2185 && (!validate || memory_address_addr_space_p (mode, addr,
2186 attrs.addrspace)))
2187 return memref;
2189 /* ??? Prefer to create garbage instead of creating shared rtl.
2190 This may happen even if offset is nonzero -- consider
2191 (plus (plus reg reg) const_int) -- so do this always. */
2192 addr = copy_rtx (addr);
2194 /* Convert a possibly large offset to a signed value within the
2195 range of the target address space. */
2196 address_mode = get_address_mode (memref);
2197 pbits = GET_MODE_BITSIZE (address_mode);
2198 if (HOST_BITS_PER_WIDE_INT > pbits)
2200 int shift = HOST_BITS_PER_WIDE_INT - pbits;
2201 offset = (((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) offset << shift))
2202 >> shift);
2205 if (adjust_address)
2207 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
2208 object, we can merge it into the LO_SUM. */
2209 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
2210 && offset >= 0
2211 && (unsigned HOST_WIDE_INT) offset
2212 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
2213 addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
2214 plus_constant (address_mode,
2215 XEXP (addr, 1), offset));
2216 #ifdef POINTERS_EXTEND_UNSIGNED
2217 /* If MEMREF is a ZERO_EXTEND from pointer_mode and the offset is valid
2218 in that mode, we merge it into the ZERO_EXTEND. We take advantage of
2219 the fact that pointers are not allowed to overflow. */
2220 else if (POINTERS_EXTEND_UNSIGNED > 0
2221 && GET_CODE (addr) == ZERO_EXTEND
2222 && GET_MODE (XEXP (addr, 0)) == pointer_mode
2223 && trunc_int_for_mode (offset, pointer_mode) == offset)
2224 addr = gen_rtx_ZERO_EXTEND (address_mode,
2225 plus_constant (pointer_mode,
2226 XEXP (addr, 0), offset));
2227 #endif
2228 else
2229 addr = plus_constant (address_mode, addr, offset);
2232 new_rtx = change_address_1 (memref, mode, addr, validate, false);
2234 /* If the address is a REG, change_address_1 rightfully returns memref,
2235 but this would destroy memref's MEM_ATTRS. */
2236 if (new_rtx == memref && offset != 0)
2237 new_rtx = copy_rtx (new_rtx);
2239 /* Conservatively drop the object if we don't know where we start from. */
2240 if (adjust_object && (!attrs.offset_known_p || !attrs.size_known_p))
2242 attrs.expr = NULL_TREE;
2243 attrs.alias = 0;
2246 /* Compute the new values of the memory attributes due to this adjustment.
2247 We add the offsets and update the alignment. */
2248 if (attrs.offset_known_p)
2250 attrs.offset += offset;
2252 /* Drop the object if the new left end is not within its bounds. */
2253 if (adjust_object && attrs.offset < 0)
2255 attrs.expr = NULL_TREE;
2256 attrs.alias = 0;
2260 /* Compute the new alignment by taking the MIN of the alignment and the
2261 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
2262 is zero. */
2263 if (offset != 0)
2265 max_align = (offset & -offset) * BITS_PER_UNIT;
2266 attrs.align = MIN (attrs.align, max_align);
2269 if (size)
2271 /* Drop the object if the new right end is not within its bounds. */
2272 if (adjust_object && (offset + size) > attrs.size)
2274 attrs.expr = NULL_TREE;
2275 attrs.alias = 0;
2277 attrs.size_known_p = true;
2278 attrs.size = size;
2280 else if (attrs.size_known_p)
2282 gcc_assert (!adjust_object);
2283 attrs.size -= offset;
2284 /* ??? The store_by_pieces machinery generates negative sizes,
2285 so don't assert for that here. */
2288 set_mem_attrs (new_rtx, &attrs);
2290 return new_rtx;
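/* Usage sketch (illustrative): most callers reach adjust_address_1
   through the adjust_address / adjust_address_nv wrappers declared
   elsewhere.  For example, to access the second word of a DImode
   memory on a 32-bit target one might write

       rtx word1 = adjust_address (dimode_mem, SImode, 4);

   which offsets the address by 4 bytes, narrows the mode to SImode
   and updates MEM_OFFSET, MEM_SIZE and MEM_ALIGN accordingly.
   dimode_mem is a placeholder name.  */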
2293 /* Return a memory reference like MEMREF, but with its mode changed
2294 to MODE and its address changed to ADDR, which is assumed to be
2295 MEMREF offset by OFFSET bytes. If VALIDATE is
2296 nonzero, the memory address is forced to be valid. */
2299 adjust_automodify_address_1 (rtx memref, enum machine_mode mode, rtx addr,
2300 HOST_WIDE_INT offset, int validate)
2302 memref = change_address_1 (memref, VOIDmode, addr, validate, false);
2303 return adjust_address_1 (memref, mode, offset, validate, 0, 0, 0);
2306 /* Return a memory reference like MEMREF, but whose address is changed by
2307 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2308 known to be in OFFSET (possibly 1). */
2311 offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
2313 rtx new_rtx, addr = XEXP (memref, 0);
2314 enum machine_mode address_mode;
2315 struct mem_attrs attrs, *defattrs;
2317 attrs = *get_mem_attrs (memref);
2318 address_mode = get_address_mode (memref);
2319 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2321 /* At this point we don't know _why_ the address is invalid. It
2322 could have secondary memory references, multiplies or anything.
2324 However, if we did go and rearrange things, we can wind up not
2325 being able to recognize the magic around pic_offset_table_rtx.
2326 This stuff is fragile, and is yet another example of why it is
2327 bad to expose PIC machinery too early. */
2328 if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx,
2329 attrs.addrspace)
2330 && GET_CODE (addr) == PLUS
2331 && XEXP (addr, 0) == pic_offset_table_rtx)
2333 addr = force_reg (GET_MODE (addr), addr);
2334 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2337 update_temp_slot_address (XEXP (memref, 0), new_rtx);
2338 new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1, false);
2340 /* If there are no changes, just return the original memory reference. */
2341 if (new_rtx == memref)
2342 return new_rtx;
2344 /* Update the alignment to reflect the offset. Reset the offset, which
2345 we don't know. */
2346 defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
2347 attrs.offset_known_p = false;
2348 attrs.size_known_p = defattrs->size_known_p;
2349 attrs.size = defattrs->size;
2350 attrs.align = MIN (attrs.align, pow2 * BITS_PER_UNIT);
2351 set_mem_attrs (new_rtx, &attrs);
2352 return new_rtx;
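/* Usage sketch for offset_address (illustrative, placeholder names):
   add a variable byte offset held in a register to a MEM's address,
   stating that the offset is known to be a multiple of 4:

       rtx elt = offset_address (array_mem, index_reg, 4);

   The known constant offset is dropped and only the alignment implied
   by the power-of-two factor is retained.  */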
2355 /* Return a memory reference like MEMREF, but with its address changed to
2356 ADDR. The caller is asserting that the actual piece of memory pointed
2357 to is the same, just the form of the address is being changed, such as
2358 by putting something into a register. INPLACE is true if any changes
2359 can be made directly to MEMREF or false if MEMREF must be treated as
2360 immutable. */
2363 replace_equiv_address (rtx memref, rtx addr, bool inplace)
2365 /* change_address_1 copies the memory attribute structure without change
2366 and that's exactly what we want here. */
2367 update_temp_slot_address (XEXP (memref, 0), addr);
2368 return change_address_1 (memref, VOIDmode, addr, 1, inplace);
2371 /* Likewise, but the reference is not required to be valid. */
2374 replace_equiv_address_nv (rtx memref, rtx addr, bool inplace)
2376 return change_address_1 (memref, VOIDmode, addr, 0, inplace);
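/* Usage sketch (illustrative): replace_equiv_address is the usual way
   to re-express an address that has been forced into a register
   without touching the memory attributes, e.g.

       rtx addr_reg = force_reg (Pmode, XEXP (mem, 0));
       mem = replace_equiv_address (mem, addr_reg, false);

   The caller thereby asserts that exactly the same bytes are
   addressed.  */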
2379 /* Return a memory reference like MEMREF, but with its mode widened to
2380 MODE and offset by OFFSET. This would be used by targets that e.g.
2381 cannot issue QImode memory operations and have to use SImode memory
2382 operations plus masking logic. */
2385 widen_memory_access (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset)
2387 rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1, 0, 0);
2388 struct mem_attrs attrs;
2389 unsigned int size = GET_MODE_SIZE (mode);
2391 /* If there are no changes, just return the original memory reference. */
2392 if (new_rtx == memref)
2393 return new_rtx;
2395 attrs = *get_mem_attrs (new_rtx);
2397 /* If we don't know what offset we were at within the expression, then
2398 we can't know if we've overstepped the bounds. */
2399 if (! attrs.offset_known_p)
2400 attrs.expr = NULL_TREE;
2402 while (attrs.expr)
2404 if (TREE_CODE (attrs.expr) == COMPONENT_REF)
2406 tree field = TREE_OPERAND (attrs.expr, 1);
2407 tree offset = component_ref_field_offset (attrs.expr);
2409 if (! DECL_SIZE_UNIT (field))
2411 attrs.expr = NULL_TREE;
2412 break;
2415 /* Is the field at least as large as the access? If so, ok,
2416 otherwise strip back to the containing structure. */
2417 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2418 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
2419 && attrs.offset >= 0)
2420 break;
2422 if (! tree_fits_uhwi_p (offset))
2424 attrs.expr = NULL_TREE;
2425 break;
2428 attrs.expr = TREE_OPERAND (attrs.expr, 0);
2429 attrs.offset += tree_to_uhwi (offset);
2430 attrs.offset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
2431 / BITS_PER_UNIT);
2433 /* Similarly for the decl. */
2434 else if (DECL_P (attrs.expr)
2435 && DECL_SIZE_UNIT (attrs.expr)
2436 && TREE_CODE (DECL_SIZE_UNIT (attrs.expr)) == INTEGER_CST
2437 && compare_tree_int (DECL_SIZE_UNIT (attrs.expr), size) >= 0
2438 && (! attrs.offset_known_p || attrs.offset >= 0))
2439 break;
2440 else
2442 /* The widened memory access overflows the expression, which means
2443 that it could alias another expression. Zap it. */
2444 attrs.expr = NULL_TREE;
2445 break;
2449 if (! attrs.expr)
2450 attrs.offset_known_p = false;
2452 /* The widened memory may alias other stuff, so zap the alias set. */
2453 /* ??? Maybe use get_alias_set on any remaining expression. */
2454 attrs.alias = 0;
2455 attrs.size_known_p = true;
2456 attrs.size = size;
2457 set_mem_attrs (new_rtx, &attrs);
2458 return new_rtx;
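/* Usage sketch (illustrative, placeholder names): a target that cannot
   issue QImode loads might widen a byte access to a full word,

       rtx word_mem = widen_memory_access (byte_mem, SImode, 0);

   accepting that the widened reference may alias other objects, which
   is why the alias set is cleared above.  */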
2461 /* A fake decl that is used as the MEM_EXPR of spill slots. */
2462 static GTY(()) tree spill_slot_decl;
2464 tree
2465 get_spill_slot_decl (bool force_build_p)
2467 tree d = spill_slot_decl;
2468 rtx rd;
2469 struct mem_attrs attrs;
2471 if (d || !force_build_p)
2472 return d;
2474 d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
2475 VAR_DECL, get_identifier ("%sfp"), void_type_node);
2476 DECL_ARTIFICIAL (d) = 1;
2477 DECL_IGNORED_P (d) = 1;
2478 TREE_USED (d) = 1;
2479 spill_slot_decl = d;
2481 rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
2482 MEM_NOTRAP_P (rd) = 1;
2483 attrs = *mode_mem_attrs[(int) BLKmode];
2484 attrs.alias = new_alias_set ();
2485 attrs.expr = d;
2486 set_mem_attrs (rd, &attrs);
2487 SET_DECL_RTL (d, rd);
2489 return d;
2492 /* Given MEM, a result from assign_stack_local, fill in the memory
2493 attributes as appropriate for a register allocator spill slot.
2494 These slots are not aliasable by other memory. We arrange for
2495 them all to use a single MEM_EXPR, so that the aliasing code can
2496 work properly in the case of shared spill slots. */
2498 void
2499 set_mem_attrs_for_spill (rtx mem)
2501 struct mem_attrs attrs;
2502 rtx addr;
2504 attrs = *get_mem_attrs (mem);
2505 attrs.expr = get_spill_slot_decl (true);
2506 attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr));
2507 attrs.addrspace = ADDR_SPACE_GENERIC;
2509 /* We expect the incoming memory to be of the form:
2510 (mem:MODE (plus (reg sfp) (const_int offset)))
2511 with perhaps the plus missing for offset = 0. */
2512 addr = XEXP (mem, 0);
2513 attrs.offset_known_p = true;
2514 attrs.offset = 0;
2515 if (GET_CODE (addr) == PLUS
2516 && CONST_INT_P (XEXP (addr, 1)))
2517 attrs.offset = INTVAL (XEXP (addr, 1));
2519 set_mem_attrs (mem, &attrs);
2520 MEM_NOTRAP_P (mem) = 1;
2523 /* Return a newly created CODE_LABEL rtx with a unique label number. */
2525 rtx_code_label *
2526 gen_label_rtx (void)
2528 return as_a <rtx_code_label *> (
2529 gen_rtx_CODE_LABEL (VOIDmode, NULL_RTX, NULL_RTX,
2530 NULL, label_num++, NULL));
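/* Usage sketch (illustrative): a label created by gen_label_rtx only
   becomes part of the insn chain once it is emitted, e.g.

       rtx_code_label *done = gen_label_rtx ();
       emit_jump (done);
       (... emit the insns to be skipped ...)
       emit_label (done);

   emit_jump and emit_label are defined elsewhere.  */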
2533 /* For procedure integration. */
2535 /* Install new pointers to the first and last insns in the chain.
2536 Also, set cur_insn_uid to one higher than the last in use.
2537 Used for an inline-procedure after copying the insn chain. */
2539 void
2540 set_new_first_and_last_insn (rtx_insn *first, rtx_insn *last)
2542 rtx_insn *insn;
2544 set_first_insn (first);
2545 set_last_insn (last);
2546 cur_insn_uid = 0;
2548 if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS)
2550 int debug_count = 0;
2552 cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1;
2553 cur_debug_insn_uid = 0;
2555 for (insn = first; insn; insn = NEXT_INSN (insn))
2556 if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID)
2557 cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
2558 else
2560 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2561 if (DEBUG_INSN_P (insn))
2562 debug_count++;
2565 if (debug_count)
2566 cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count;
2567 else
2568 cur_debug_insn_uid++;
2570 else
2571 for (insn = first; insn; insn = NEXT_INSN (insn))
2572 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2574 cur_insn_uid++;
2577 /* Go through all the RTL insn bodies and copy any invalid shared
2578 structure. This routine should only be called once. */
2580 static void
2581 unshare_all_rtl_1 (rtx_insn *insn)
2583 /* Unshare just about everything else. */
2584 unshare_all_rtl_in_chain (insn);
2586 /* Make sure the addresses of stack slots found outside the insn chain
2587 (such as, in DECL_RTL of a variable) are not shared
2588 with the insn chain.
2590 This special care is necessary when the stack slot MEM does not
2591 actually appear in the insn chain. If it does appear, its address
2592 is unshared from all else at that point. */
2593 stack_slot_list = safe_as_a <rtx_expr_list *> (
2594 copy_rtx_if_shared (stack_slot_list));
2597 /* Go through all the RTL insn bodies and copy any invalid shared
2598 structure, again. This is a fairly expensive thing to do so it
2599 should be done sparingly. */
2601 void
2602 unshare_all_rtl_again (rtx_insn *insn)
2604 rtx_insn *p;
2605 tree decl;
2607 for (p = insn; p; p = NEXT_INSN (p))
2608 if (INSN_P (p))
2610 reset_used_flags (PATTERN (p));
2611 reset_used_flags (REG_NOTES (p));
2612 if (CALL_P (p))
2613 reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
2616 /* Make sure that virtual stack slots are not shared. */
2617 set_used_decls (DECL_INITIAL (cfun->decl));
2619 /* Make sure that virtual parameters are not shared. */
2620 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
2621 set_used_flags (DECL_RTL (decl));
2623 reset_used_flags (stack_slot_list);
2625 unshare_all_rtl_1 (insn);
2628 unsigned int
2629 unshare_all_rtl (void)
2631 unshare_all_rtl_1 (get_insns ());
2632 return 0;
2636 /* Check that ORIG is not marked when it should not be and mark ORIG as in use.
2637 Recursively does the same for subexpressions. */
2639 static void
2640 verify_rtx_sharing (rtx orig, rtx insn)
2642 rtx x = orig;
2643 int i;
2644 enum rtx_code code;
2645 const char *format_ptr;
2647 if (x == 0)
2648 return;
2650 code = GET_CODE (x);
2652 /* These types may be freely shared. */
2654 switch (code)
2656 case REG:
2657 case DEBUG_EXPR:
2658 case VALUE:
2659 CASE_CONST_ANY:
2660 case SYMBOL_REF:
2661 case LABEL_REF:
2662 case CODE_LABEL:
2663 case PC:
2664 case CC0:
2665 case RETURN:
2666 case SIMPLE_RETURN:
2667 case SCRATCH:
2668 /* SCRATCHes must be shared because they represent distinct values. */
2669 return;
2670 case CLOBBER:
2671 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
2672 clobbers or clobbers of hard registers that originated as pseudos.
2673 This is needed to allow safe register renaming. */
2674 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2675 && ORIGINAL_REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 0)))
2676 return;
2677 break;
2679 case CONST:
2680 if (shared_const_p (orig))
2681 return;
2682 break;
2684 case MEM:
2685 /* A MEM is allowed to be shared if its address is constant. */
2686 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2687 || reload_completed || reload_in_progress)
2688 return;
2690 break;
2692 default:
2693 break;
2696 /* This rtx may not be shared. If it has already been seen,
2697 replace it with a copy of itself. */
2698 #ifdef ENABLE_CHECKING
2699 if (RTX_FLAG (x, used))
2701 error ("invalid rtl sharing found in the insn");
2702 debug_rtx (insn);
2703 error ("shared rtx");
2704 debug_rtx (x);
2705 internal_error ("internal consistency failure");
2707 #endif
2708 gcc_assert (!RTX_FLAG (x, used));
2710 RTX_FLAG (x, used) = 1;
2712 /* Now scan the subexpressions recursively. */
2714 format_ptr = GET_RTX_FORMAT (code);
2716 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2718 switch (*format_ptr++)
2720 case 'e':
2721 verify_rtx_sharing (XEXP (x, i), insn);
2722 break;
2724 case 'E':
2725 if (XVEC (x, i) != NULL)
2727 int j;
2728 int len = XVECLEN (x, i);
2730 for (j = 0; j < len; j++)
2732 /* We allow sharing of ASM_OPERANDS inside a single
2733 instruction. */
2734 if (j && GET_CODE (XVECEXP (x, i, j)) == SET
2735 && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2736 == ASM_OPERANDS))
2737 verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2738 else
2739 verify_rtx_sharing (XVECEXP (x, i, j), insn);
2742 break;
2745 return;
2748 /* Reset used-flags for INSN. */
2750 static void
2751 reset_insn_used_flags (rtx insn)
2753 gcc_assert (INSN_P (insn));
2754 reset_used_flags (PATTERN (insn));
2755 reset_used_flags (REG_NOTES (insn));
2756 if (CALL_P (insn))
2757 reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn));
2760 /* Go through all the RTL insn bodies and clear all the USED bits. */
2762 static void
2763 reset_all_used_flags (void)
2765 rtx_insn *p;
2767 for (p = get_insns (); p; p = NEXT_INSN (p))
2768 if (INSN_P (p))
2770 rtx pat = PATTERN (p);
2771 if (GET_CODE (pat) != SEQUENCE)
2772 reset_insn_used_flags (p);
2773 else
2775 gcc_assert (REG_NOTES (p) == NULL);
2776 for (int i = 0; i < XVECLEN (pat, 0); i++)
2778 rtx insn = XVECEXP (pat, 0, i);
2779 if (INSN_P (insn))
2780 reset_insn_used_flags (insn);
2786 /* Verify sharing in INSN. */
2788 static void
2789 verify_insn_sharing (rtx insn)
2791 gcc_assert (INSN_P (insn));
2792 reset_used_flags (PATTERN (insn));
2793 reset_used_flags (REG_NOTES (insn));
2794 if (CALL_P (insn))
2795 reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn));
2798 /* Go through all the RTL insn bodies and check that there is no unexpected
2799 sharing in between the subexpressions. */
2801 DEBUG_FUNCTION void
2802 verify_rtl_sharing (void)
2804 rtx_insn *p;
2806 timevar_push (TV_VERIFY_RTL_SHARING);
2808 reset_all_used_flags ();
2810 for (p = get_insns (); p; p = NEXT_INSN (p))
2811 if (INSN_P (p))
2813 rtx pat = PATTERN (p);
2814 if (GET_CODE (pat) != SEQUENCE)
2815 verify_insn_sharing (p);
2816 else
2817 for (int i = 0; i < XVECLEN (pat, 0); i++)
2819 rtx insn = XVECEXP (pat, 0, i);
2820 if (INSN_P (insn))
2821 verify_insn_sharing (insn);
2825 reset_all_used_flags ();
2827 timevar_pop (TV_VERIFY_RTL_SHARING);
2830 /* Go through all the RTL insn bodies and copy any invalid shared structure.
2831 Assumes the mark bits are cleared at entry. */
2833 void
2834 unshare_all_rtl_in_chain (rtx_insn *insn)
2836 for (; insn; insn = NEXT_INSN (insn))
2837 if (INSN_P (insn))
2839 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2840 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2841 if (CALL_P (insn))
2842 CALL_INSN_FUNCTION_USAGE (insn)
2843 = copy_rtx_if_shared (CALL_INSN_FUNCTION_USAGE (insn));
2847 /* Go through all virtual stack slots of a function and mark them as
2848 shared. We never replace the DECL_RTLs themselves with a copy,
2849 but expressions mentioned in a DECL_RTL cannot be shared with
2850 expressions in the instruction stream.
2852 Note that reload may convert pseudo registers into memories in-place.
2853 Pseudo registers are always shared, but MEMs never are. Thus if we
2854 reset the used flags on MEMs in the instruction stream, we must set
2855 them again on MEMs that appear in DECL_RTLs. */
2857 static void
2858 set_used_decls (tree blk)
2860 tree t;
2862 /* Mark decls. */
2863 for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
2864 if (DECL_RTL_SET_P (t))
2865 set_used_flags (DECL_RTL (t));
2867 /* Now process sub-blocks. */
2868 for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
2869 set_used_decls (t);
2872 /* Mark ORIG as in use, and return a copy of it if it was already in use.
2873 Recursively does the same for subexpressions. Uses
2874 copy_rtx_if_shared_1 to reduce stack space. */
2877 copy_rtx_if_shared (rtx orig)
2879 copy_rtx_if_shared_1 (&orig);
2880 return orig;
2883 /* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
2884 use. Recursively does the same for subexpressions. */
2886 static void
2887 copy_rtx_if_shared_1 (rtx *orig1)
2889 rtx x;
2890 int i;
2891 enum rtx_code code;
2892 rtx *last_ptr;
2893 const char *format_ptr;
2894 int copied = 0;
2895 int length;
2897 /* Repeat is used to turn tail-recursion into iteration. */
2898 repeat:
2899 x = *orig1;
2901 if (x == 0)
2902 return;
2904 code = GET_CODE (x);
2906 /* These types may be freely shared. */
2908 switch (code)
2910 case REG:
2911 case DEBUG_EXPR:
2912 case VALUE:
2913 CASE_CONST_ANY:
2914 case SYMBOL_REF:
2915 case LABEL_REF:
2916 case CODE_LABEL:
2917 case PC:
2918 case CC0:
2919 case RETURN:
2920 case SIMPLE_RETURN:
2921 case SCRATCH:
2922 /* SCRATCHes must be shared because they represent distinct values. */
2923 return;
2924 case CLOBBER:
2925 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
2926 clobbers or clobbers of hard registers that originated as pseudos.
2927 This is needed to allow safe register renaming. */
2928 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2929 && ORIGINAL_REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 0)))
2930 return;
2931 break;
2933 case CONST:
2934 if (shared_const_p (x))
2935 return;
2936 break;
2938 case DEBUG_INSN:
2939 case INSN:
2940 case JUMP_INSN:
2941 case CALL_INSN:
2942 case NOTE:
2943 case BARRIER:
2944 /* The chain of insns is not being copied. */
2945 return;
2947 default:
2948 break;
2951 /* This rtx may not be shared. If it has already been seen,
2952 replace it with a copy of itself. */
2954 if (RTX_FLAG (x, used))
2956 x = shallow_copy_rtx (x);
2957 copied = 1;
2959 RTX_FLAG (x, used) = 1;
2961 /* Now scan the subexpressions recursively.
2962 We can store any replaced subexpressions directly into X
2963 since we know X is not shared! Any vectors in X
2964 must be copied if X was copied. */
2966 format_ptr = GET_RTX_FORMAT (code);
2967 length = GET_RTX_LENGTH (code);
2968 last_ptr = NULL;
2970 for (i = 0; i < length; i++)
2972 switch (*format_ptr++)
2974 case 'e':
2975 if (last_ptr)
2976 copy_rtx_if_shared_1 (last_ptr);
2977 last_ptr = &XEXP (x, i);
2978 break;
2980 case 'E':
2981 if (XVEC (x, i) != NULL)
2983 int j;
2984 int len = XVECLEN (x, i);
2986 /* Copy the vector iff I copied the rtx and the length
2987 is nonzero. */
2988 if (copied && len > 0)
2989 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
2991 /* Call recursively on all inside the vector. */
2992 for (j = 0; j < len; j++)
2994 if (last_ptr)
2995 copy_rtx_if_shared_1 (last_ptr);
2996 last_ptr = &XVECEXP (x, i, j);
2999 break;
3002 *orig1 = x;
3003 if (last_ptr)
3005 orig1 = last_ptr;
3006 goto repeat;
3008 return;
3011 /* Set the USED bit in X and its non-shareable subparts to FLAG. */
3013 static void
3014 mark_used_flags (rtx x, int flag)
3016 int i, j;
3017 enum rtx_code code;
3018 const char *format_ptr;
3019 int length;
3021 /* Repeat is used to turn tail-recursion into iteration. */
3022 repeat:
3023 if (x == 0)
3024 return;
3026 code = GET_CODE (x);
3028 /* These types may be freely shared so we needn't do any resetting
3029 for them. */
3031 switch (code)
3033 case REG:
3034 case DEBUG_EXPR:
3035 case VALUE:
3036 CASE_CONST_ANY:
3037 case SYMBOL_REF:
3038 case CODE_LABEL:
3039 case PC:
3040 case CC0:
3041 case RETURN:
3042 case SIMPLE_RETURN:
3043 return;
3045 case DEBUG_INSN:
3046 case INSN:
3047 case JUMP_INSN:
3048 case CALL_INSN:
3049 case NOTE:
3050 case LABEL_REF:
3051 case BARRIER:
3052 /* The chain of insns is not being copied. */
3053 return;
3055 default:
3056 break;
3059 RTX_FLAG (x, used) = flag;
3061 format_ptr = GET_RTX_FORMAT (code);
3062 length = GET_RTX_LENGTH (code);
3064 for (i = 0; i < length; i++)
3066 switch (*format_ptr++)
3068 case 'e':
3069 if (i == length-1)
3071 x = XEXP (x, i);
3072 goto repeat;
3074 mark_used_flags (XEXP (x, i), flag);
3075 break;
3077 case 'E':
3078 for (j = 0; j < XVECLEN (x, i); j++)
3079 mark_used_flags (XVECEXP (x, i, j), flag);
3080 break;
3085 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
3086 to look for shared sub-parts. */
3088 void
3089 reset_used_flags (rtx x)
3091 mark_used_flags (x, 0);
3094 /* Set all the USED bits in X to allow copy_rtx_if_shared to be used
3095 to look for shared sub-parts. */
3097 void
3098 set_used_flags (rtx x)
3100 mark_used_flags (x, 1);
3103 /* Copy X if necessary so that it won't be altered by changes in OTHER.
3104 Return X or the rtx for the pseudo reg the value of X was copied into.
3105 OTHER must be valid as a SET_DEST. */
3108 make_safe_from (rtx x, rtx other)
3110 while (1)
3111 switch (GET_CODE (other))
3113 case SUBREG:
3114 other = SUBREG_REG (other);
3115 break;
3116 case STRICT_LOW_PART:
3117 case SIGN_EXTEND:
3118 case ZERO_EXTEND:
3119 other = XEXP (other, 0);
3120 break;
3121 default:
3122 goto done;
3124 done:
3125 if ((MEM_P (other)
3126 && ! CONSTANT_P (x)
3127 && !REG_P (x)
3128 && GET_CODE (x) != SUBREG)
3129 || (REG_P (other)
3130 && (REGNO (other) < FIRST_PSEUDO_REGISTER
3131 || reg_mentioned_p (other, x))))
3133 rtx temp = gen_reg_rtx (GET_MODE (x));
3134 emit_move_insn (temp, x);
3135 return temp;
3137 return x;
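/* Usage sketch (illustrative, placeholder names): before storing into
   TARGET, an expander can protect a value X that the store might
   clobber:

       x = make_safe_from (x, target);
       emit_move_insn (target, some_value);

   after which X is still valid for use.  */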
3140 /* Emission of insns (adding them to the doubly-linked list). */
3142 /* Return the last insn emitted, even if it is in a sequence now pushed. */
3144 rtx_insn *
3145 get_last_insn_anywhere (void)
3147 struct sequence_stack *stack;
3148 if (get_last_insn ())
3149 return get_last_insn ();
3150 for (stack = seq_stack; stack; stack = stack->next)
3151 if (stack->last != 0)
3152 return stack->last;
3153 return 0;
3156 /* Return the first nonnote insn emitted in current sequence or current
3157 function. This routine looks inside SEQUENCEs. */
3159 rtx_insn *
3160 get_first_nonnote_insn (void)
3162 rtx_insn *insn = get_insns ();
3164 if (insn)
3166 if (NOTE_P (insn))
3167 for (insn = next_insn (insn);
3168 insn && NOTE_P (insn);
3169 insn = next_insn (insn))
3170 continue;
3171 else
3173 if (NONJUMP_INSN_P (insn)
3174 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3175 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3179 return insn;
3182 /* Return the last nonnote insn emitted in current sequence or current
3183 function. This routine looks inside SEQUENCEs. */
3185 rtx_insn *
3186 get_last_nonnote_insn (void)
3188 rtx_insn *insn = get_last_insn ();
3190 if (insn)
3192 if (NOTE_P (insn))
3193 for (insn = previous_insn (insn);
3194 insn && NOTE_P (insn);
3195 insn = previous_insn (insn))
3196 continue;
3197 else
3199 if (NONJUMP_INSN_P (insn))
3200 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
3201 insn = seq->insn (seq->len () - 1);
3205 return insn;
3208 /* Return the number of actual (non-debug) insns emitted in this
3209 function. */
3212 get_max_insn_count (void)
3214 int n = cur_insn_uid;
3216 /* The table size must be stable across -g, to avoid codegen
3217 differences due to debug insns, and not be affected by
3218 -fmin-insn-uid, to avoid excessive table size and to simplify
3219 debugging of -fcompare-debug failures. */
3220 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3221 n -= cur_debug_insn_uid;
3222 else
3223 n -= MIN_NONDEBUG_INSN_UID;
3225 return n;
3229 /* Return the next insn. If it is a SEQUENCE, return the first insn
3230 of the sequence. */
3232 rtx_insn *
3233 next_insn (rtx_insn *insn)
3235 if (insn)
3237 insn = NEXT_INSN (insn);
3238 if (insn && NONJUMP_INSN_P (insn)
3239 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3240 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3243 return insn;
3246 /* Return the previous insn. If it is a SEQUENCE, return the last insn
3247 of the sequence. */
3249 rtx_insn *
3250 previous_insn (rtx_insn *insn)
3252 if (insn)
3254 insn = PREV_INSN (insn);
3255 if (insn && NONJUMP_INSN_P (insn))
3256 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
3257 insn = seq->insn (seq->len () - 1);
3260 return insn;
3263 /* Return the next insn after INSN that is not a NOTE. This routine does not
3264 look inside SEQUENCEs. */
3266 rtx_insn *
3267 next_nonnote_insn (rtx uncast_insn)
3269 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3270 while (insn)
3272 insn = NEXT_INSN (insn);
3273 if (insn == 0 || !NOTE_P (insn))
3274 break;
3277 return insn;
3280 /* Return the next insn after INSN that is not a NOTE, but stop the
3281 search before we enter another basic block. This routine does not
3282 look inside SEQUENCEs. */
3284 rtx_insn *
3285 next_nonnote_insn_bb (rtx_insn *insn)
3287 while (insn)
3289 insn = NEXT_INSN (insn);
3290 if (insn == 0 || !NOTE_P (insn))
3291 break;
3292 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3293 return NULL;
3296 return insn;
3299 /* Return the previous insn before INSN that is not a NOTE. This routine does
3300 not look inside SEQUENCEs. */
3302 rtx_insn *
3303 prev_nonnote_insn (rtx uncast_insn)
3305 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3307 while (insn)
3309 insn = PREV_INSN (insn);
3310 if (insn == 0 || !NOTE_P (insn))
3311 break;
3314 return insn;
3317 /* Return the previous insn before INSN that is not a NOTE, but stop
3318 the search before we enter another basic block. This routine does
3319 not look inside SEQUENCEs. */
3321 rtx_insn *
3322 prev_nonnote_insn_bb (rtx uncast_insn)
3324 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3326 while (insn)
3328 insn = PREV_INSN (insn);
3329 if (insn == 0 || !NOTE_P (insn))
3330 break;
3331 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3332 return NULL;
3335 return insn;
3338 /* Return the next insn after INSN that is not a DEBUG_INSN. This
3339 routine does not look inside SEQUENCEs. */
3341 rtx_insn *
3342 next_nondebug_insn (rtx uncast_insn)
3344 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3346 while (insn)
3348 insn = NEXT_INSN (insn);
3349 if (insn == 0 || !DEBUG_INSN_P (insn))
3350 break;
3353 return insn;
3356 /* Return the previous insn before INSN that is not a DEBUG_INSN.
3357 This routine does not look inside SEQUENCEs. */
3359 rtx_insn *
3360 prev_nondebug_insn (rtx uncast_insn)
3362 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3364 while (insn)
3366 insn = PREV_INSN (insn);
3367 if (insn == 0 || !DEBUG_INSN_P (insn))
3368 break;
3371 return insn;
3374 /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
3375 This routine does not look inside SEQUENCEs. */
3377 rtx_insn *
3378 next_nonnote_nondebug_insn (rtx uncast_insn)
3380 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3382 while (insn)
3384 insn = NEXT_INSN (insn);
3385 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3386 break;
3389 return insn;
3392 /* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
3393 This routine does not look inside SEQUENCEs. */
3395 rtx_insn *
3396 prev_nonnote_nondebug_insn (rtx uncast_insn)
3398 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3400 while (insn)
3402 insn = PREV_INSN (insn);
3403 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3404 break;
3407 return insn;
3410 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3411 or 0, if there is none. This routine does not look inside
3412 SEQUENCEs. */
3414 rtx_insn *
3415 next_real_insn (rtx uncast_insn)
3417 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3419 while (insn)
3421 insn = NEXT_INSN (insn);
3422 if (insn == 0 || INSN_P (insn))
3423 break;
3426 return insn;
3429 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3430 or 0, if there is none. This routine does not look inside
3431 SEQUENCEs. */
3433 rtx_insn *
3434 prev_real_insn (rtx uncast_insn)
3436 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3438 while (insn)
3440 insn = PREV_INSN (insn);
3441 if (insn == 0 || INSN_P (insn))
3442 break;
3445 return insn;
3448 /* Return the last CALL_INSN in the current list, or 0 if there is none.
3449 This routine does not look inside SEQUENCEs. */
3451 rtx_call_insn *
3452 last_call_insn (void)
3454 rtx_insn *insn;
3456 for (insn = get_last_insn ();
3457 insn && !CALL_P (insn);
3458 insn = PREV_INSN (insn))
3461 return safe_as_a <rtx_call_insn *> (insn);
3464 /* Find the next insn after INSN that really does something. This routine
3465 does not look inside SEQUENCEs. After reload this also skips over
3466 standalone USE and CLOBBER insns. */
3469 active_insn_p (const_rtx insn)
3471 return (CALL_P (insn) || JUMP_P (insn)
3472 || JUMP_TABLE_DATA_P (insn) /* FIXME */
3473 || (NONJUMP_INSN_P (insn)
3474 && (! reload_completed
3475 || (GET_CODE (PATTERN (insn)) != USE
3476 && GET_CODE (PATTERN (insn)) != CLOBBER))));
3479 rtx_insn *
3480 next_active_insn (rtx uncast_insn)
3482 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3484 while (insn)
3486 insn = NEXT_INSN (insn);
3487 if (insn == 0 || active_insn_p (insn))
3488 break;
3491 return insn;
3494 /* Find the last insn before INSN that really does something. This routine
3495 does not look inside SEQUENCEs. After reload this also skips over
3496 standalone USE and CLOBBER insns. */
3498 rtx_insn *
3499 prev_active_insn (rtx uncast_insn)
3501 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3503 while (insn)
3505 insn = PREV_INSN (insn);
3506 if (insn == 0 || active_insn_p (insn))
3507 break;
3510 return insn;
3513 #ifdef HAVE_cc0
3514 /* Return the next insn that uses CC0 after INSN, which is assumed to
3515 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3516 applied to the result of this function should yield INSN).
3518 Normally, this is simply the next insn. However, if a REG_CC_USER note
3519 is present, it contains the insn that uses CC0.
3521 Return 0 if we can't find the insn. */
3523 rtx_insn *
3524 next_cc0_user (rtx uncast_insn)
3526 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3528 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3530 if (note)
3531 return safe_as_a <rtx_insn *> (XEXP (note, 0));
3533 insn = next_nonnote_insn (insn);
3534 if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3535 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3537 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3538 return insn;
3540 return 0;
3543 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3544 note, it is the previous insn. */
3546 rtx_insn *
3547 prev_cc0_setter (rtx uncast_insn)
3549 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3551 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3553 if (note)
3554 return safe_as_a <rtx_insn *> (XEXP (note, 0));
3556 insn = prev_nonnote_insn (insn);
3557 gcc_assert (sets_cc0_p (PATTERN (insn)));
3559 return insn;
3561 #endif
3563 #ifdef AUTO_INC_DEC
3564 /* Return true if X contains an RTX_AUTOINC class rtx whose address operand matches REG. */
3566 static int
3567 find_auto_inc (const_rtx x, const_rtx reg)
3569 subrtx_iterator::array_type array;
3570 FOR_EACH_SUBRTX (iter, array, x, NONCONST)
3572 const_rtx x = *iter;
3573 if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC
3574 && rtx_equal_p (reg, XEXP (x, 0)))
3575 return true;
3577 return false;
3579 #endif
3581 /* Increment the label uses for all labels present in rtx. */
3583 static void
3584 mark_label_nuses (rtx x)
3586 enum rtx_code code;
3587 int i, j;
3588 const char *fmt;
3590 code = GET_CODE (x);
3591 if (code == LABEL_REF && LABEL_P (LABEL_REF_LABEL (x)))
3592 LABEL_NUSES (LABEL_REF_LABEL (x))++;
3594 fmt = GET_RTX_FORMAT (code);
3595 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3597 if (fmt[i] == 'e')
3598 mark_label_nuses (XEXP (x, i));
3599 else if (fmt[i] == 'E')
3600 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3601 mark_label_nuses (XVECEXP (x, i, j));
3606 /* Try splitting insns that can be split for better scheduling.
3607 PAT is the pattern which we might be able to split.
3608 TRIAL is the insn providing PAT.
3609 LAST is nonzero if we should return the last insn of the sequence produced.
3611 If this routine succeeds in splitting, it returns the first or last
3612 replacement insn depending on the value of LAST. Otherwise, it
3613 returns TRIAL. If the insn to be returned can be split, it will be. */
3615 rtx_insn *
3616 try_split (rtx pat, rtx uncast_trial, int last)
3618 rtx_insn *trial = as_a <rtx_insn *> (uncast_trial);
3619 rtx_insn *before = PREV_INSN (trial);
3620 rtx_insn *after = NEXT_INSN (trial);
3621 rtx note;
3622 rtx_insn *seq, *tem;
3623 int probability;
3624 rtx_insn *insn_last, *insn;
3625 int njumps = 0;
3626 rtx call_insn = NULL_RTX;
3628 /* We're not good at redistributing frame information. */
3629 if (RTX_FRAME_RELATED_P (trial))
3630 return trial;
3632 if (any_condjump_p (trial)
3633 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3634 split_branch_probability = XINT (note, 0);
3635 probability = split_branch_probability;
3637 seq = safe_as_a <rtx_insn *> (split_insns (pat, trial));
3639 split_branch_probability = -1;
3641 if (!seq)
3642 return trial;
3644 /* Avoid infinite loop if any insn of the result matches
3645 the original pattern. */
3646 insn_last = seq;
3647 while (1)
3649 if (INSN_P (insn_last)
3650 && rtx_equal_p (PATTERN (insn_last), pat))
3651 return trial;
3652 if (!NEXT_INSN (insn_last))
3653 break;
3654 insn_last = NEXT_INSN (insn_last);
3657 /* We will be adding the new sequence to the function. The splitters
3658 may have introduced invalid RTL sharing, so unshare the sequence now. */
3659 unshare_all_rtl_in_chain (seq);
3661 /* Mark labels and copy flags. */
3662 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3664 if (JUMP_P (insn))
3666 if (JUMP_P (trial))
3667 CROSSING_JUMP_P (insn) = CROSSING_JUMP_P (trial);
3668 mark_jump_label (PATTERN (insn), insn, 0);
3669 njumps++;
3670 if (probability != -1
3671 && any_condjump_p (insn)
3672 && !find_reg_note (insn, REG_BR_PROB, 0))
3674 /* We can preserve the REG_BR_PROB notes only if exactly
3675 one jump is created, otherwise the machine description
3676 is responsible for this step using
3677 the split_branch_probability variable. */
3678 gcc_assert (njumps == 1);
3679 add_int_reg_note (insn, REG_BR_PROB, probability);
3684 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3685 in SEQ and copy any additional information across. */
3686 if (CALL_P (trial))
3688 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3689 if (CALL_P (insn))
3691 rtx_insn *next;
3692 rtx *p;
3694 gcc_assert (call_insn == NULL_RTX);
3695 call_insn = insn;
3697 /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the
3698 target may have explicitly specified. */
3699 p = &CALL_INSN_FUNCTION_USAGE (insn);
3700 while (*p)
3701 p = &XEXP (*p, 1);
3702 *p = CALL_INSN_FUNCTION_USAGE (trial);
3704 /* If the old call was a sibling call, the new one must
3705 be too. */
3706 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
3708 /* If the new call is the last instruction in the sequence,
3709 it will effectively replace the old call in-situ. Otherwise
3710 we must move any following NOTE_INSN_CALL_ARG_LOCATION note
3711 so that it comes immediately after the new call. */
3712 if (NEXT_INSN (insn))
3713 for (next = NEXT_INSN (trial);
3714 next && NOTE_P (next);
3715 next = NEXT_INSN (next))
3716 if (NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION)
3718 remove_insn (next);
3719 add_insn_after (next, insn, NULL);
3720 break;
3725 /* Copy notes, particularly those related to the CFG. */
3726 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3728 switch (REG_NOTE_KIND (note))
3730 case REG_EH_REGION:
3731 copy_reg_eh_region_note_backward (note, insn_last, NULL);
3732 break;
3734 case REG_NORETURN:
3735 case REG_SETJMP:
3736 case REG_TM:
3737 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3739 if (CALL_P (insn))
3740 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3742 break;
3744 case REG_NON_LOCAL_GOTO:
3745 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3747 if (JUMP_P (insn))
3748 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3750 break;
3752 #ifdef AUTO_INC_DEC
3753 case REG_INC:
3754 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3756 rtx reg = XEXP (note, 0);
3757 if (!FIND_REG_INC_NOTE (insn, reg)
3758 && find_auto_inc (PATTERN (insn), reg))
3759 add_reg_note (insn, REG_INC, reg);
3761 break;
3762 #endif
3764 case REG_ARGS_SIZE:
3765 fixup_args_size_notes (NULL, insn_last, INTVAL (XEXP (note, 0)));
3766 break;
3768 case REG_CALL_DECL:
3769 gcc_assert (call_insn != NULL_RTX);
3770 add_reg_note (call_insn, REG_NOTE_KIND (note), XEXP (note, 0));
3771 break;
3773 default:
3774 break;
3778 /* If there are LABELS inside the split insns, increment the
3779 usage count so we don't delete the label. */
3780 if (INSN_P (trial))
3782 insn = insn_last;
3783 while (insn != NULL_RTX)
3785 /* JUMP_P insns have already been "marked" above. */
3786 if (NONJUMP_INSN_P (insn))
3787 mark_label_nuses (PATTERN (insn));
3789 insn = PREV_INSN (insn);
3793 tem = emit_insn_after_setloc (seq, trial, INSN_LOCATION (trial));
3795 delete_insn (trial);
3797 /* Recursively call try_split for each new insn created; by the
3798 time control returns here that insn will be fully split, so
3799 set LAST and continue from the insn after the one returned.
3800 We can't use next_active_insn here since AFTER may be a note.
3801 Ignore deleted insns, which can occur if not optimizing. */
3802 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3803 if (! tem->deleted () && INSN_P (tem))
3804 tem = try_split (PATTERN (tem), tem, 1);
3806 /* Return either the first or the last insn, depending on which was
3807 requested. */
3808 return last
3809 ? (after ? PREV_INSN (after) : get_last_insn ())
3810 : NEXT_INSN (before);
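/* Usage sketch (illustrative): split passes call try_split on each
   candidate insn and continue from whatever it returns, e.g.

       rtx_insn *last = try_split (PATTERN (insn), insn, 1);

   If no splitter in the machine description matches, LAST is simply
   INSN; otherwise it is the last insn of the replacement sequence.  */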
3813 /* Make and return an INSN rtx, initializing all its slots.
3814 Store PATTERN in the pattern slot. */
3816 rtx_insn *
3817 make_insn_raw (rtx pattern)
3819 rtx_insn *insn;
3821 insn = as_a <rtx_insn *> (rtx_alloc (INSN));
3823 INSN_UID (insn) = cur_insn_uid++;
3824 PATTERN (insn) = pattern;
3825 INSN_CODE (insn) = -1;
3826 REG_NOTES (insn) = NULL;
3827 INSN_LOCATION (insn) = curr_insn_location ();
3828 BLOCK_FOR_INSN (insn) = NULL;
3830 #ifdef ENABLE_RTL_CHECKING
3831 if (insn
3832 && INSN_P (insn)
3833 && (returnjump_p (insn)
3834 || (GET_CODE (insn) == SET
3835 && SET_DEST (insn) == pc_rtx)))
3837 warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
3838 debug_rtx (insn);
3840 #endif
3842 return insn;
3845 /* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn. */
3847 static rtx_insn *
3848 make_debug_insn_raw (rtx pattern)
3850 rtx_debug_insn *insn;
3852 insn = as_a <rtx_debug_insn *> (rtx_alloc (DEBUG_INSN));
3853 INSN_UID (insn) = cur_debug_insn_uid++;
3854 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3855 INSN_UID (insn) = cur_insn_uid++;
3857 PATTERN (insn) = pattern;
3858 INSN_CODE (insn) = -1;
3859 REG_NOTES (insn) = NULL;
3860 INSN_LOCATION (insn) = curr_insn_location ();
3861 BLOCK_FOR_INSN (insn) = NULL;
3863 return insn;
3866 /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
3868 static rtx_insn *
3869 make_jump_insn_raw (rtx pattern)
3871 rtx_jump_insn *insn;
3873 insn = as_a <rtx_jump_insn *> (rtx_alloc (JUMP_INSN));
3874 INSN_UID (insn) = cur_insn_uid++;
3876 PATTERN (insn) = pattern;
3877 INSN_CODE (insn) = -1;
3878 REG_NOTES (insn) = NULL;
3879 JUMP_LABEL (insn) = NULL;
3880 INSN_LOCATION (insn) = curr_insn_location ();
3881 BLOCK_FOR_INSN (insn) = NULL;
3883 return insn;
3886 /* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
3888 static rtx_insn *
3889 make_call_insn_raw (rtx pattern)
3891 rtx_call_insn *insn;
3893 insn = as_a <rtx_call_insn *> (rtx_alloc (CALL_INSN));
3894 INSN_UID (insn) = cur_insn_uid++;
3896 PATTERN (insn) = pattern;
3897 INSN_CODE (insn) = -1;
3898 REG_NOTES (insn) = NULL;
3899 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
3900 INSN_LOCATION (insn) = curr_insn_location ();
3901 BLOCK_FOR_INSN (insn) = NULL;
3903 return insn;
3906 /* Like `make_insn_raw' but make a NOTE instead of an insn. */
3908 static rtx_note *
3909 make_note_raw (enum insn_note subtype)
3911 /* Some notes are never created this way at all. These notes are
3912 only created by patching out insns. */
3913 gcc_assert (subtype != NOTE_INSN_DELETED_LABEL
3914 && subtype != NOTE_INSN_DELETED_DEBUG_LABEL);
3916 rtx_note *note = as_a <rtx_note *> (rtx_alloc (NOTE));
3917 INSN_UID (note) = cur_insn_uid++;
3918 NOTE_KIND (note) = subtype;
3919 BLOCK_FOR_INSN (note) = NULL;
3920 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
3921 return note;
3924 /* Add INSN to the end of the doubly-linked list, between PREV and NEXT.
3925 INSN may be any object that can appear in the chain: INSN_P and NOTE_P objects,
3926 but also BARRIERs and JUMP_TABLE_DATAs. PREV and NEXT may be NULL. */
3928 static inline void
3929 link_insn_into_chain (rtx_insn *insn, rtx_insn *prev, rtx_insn *next)
3931 SET_PREV_INSN (insn) = prev;
3932 SET_NEXT_INSN (insn) = next;
3933 if (prev != NULL)
3935 SET_NEXT_INSN (prev) = insn;
3936 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3938 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
3939 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = insn;
3942 if (next != NULL)
3944 SET_PREV_INSN (next) = insn;
3945 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3947 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
3948 SET_PREV_INSN (sequence->insn (0)) = insn;
3952 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3954 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (insn));
3955 SET_PREV_INSN (sequence->insn (0)) = prev;
3956 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
3960 /* Add INSN to the end of the doubly-linked list.
3961 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
3963 void
3964 add_insn (rtx_insn *insn)
3966 rtx_insn *prev = get_last_insn ();
3967 link_insn_into_chain (insn, prev, NULL);
3968 if (NULL == get_insns ())
3969 set_first_insn (insn);
3970 set_last_insn (insn);
3973 /* Add INSN into the doubly-linked list after insn AFTER. */
3975 static void
3976 add_insn_after_nobb (rtx_insn *insn, rtx_insn *after)
3978 rtx_insn *next = NEXT_INSN (after);
3980 gcc_assert (!optimize || !after->deleted ());
3982 link_insn_into_chain (insn, after, next);
3984 if (next == NULL)
3986 if (get_last_insn () == after)
3987 set_last_insn (insn);
3988 else
3990 struct sequence_stack *stack = seq_stack;
3991 /* Scan all pending sequences too. */
3992 for (; stack; stack = stack->next)
3993 if (after == stack->last)
3995 stack->last = insn;
3996 break;
4002 /* Add INSN into the doubly-linked list before insn BEFORE. */
4004 static void
4005 add_insn_before_nobb (rtx_insn *insn, rtx_insn *before)
4007 rtx_insn *prev = PREV_INSN (before);
4009 gcc_assert (!optimize || !before->deleted ());
4011 link_insn_into_chain (insn, prev, before);
4013 if (prev == NULL)
4015 if (get_insns () == before)
4016 set_first_insn (insn);
4017 else
4019 struct sequence_stack *stack = seq_stack;
4020 /* Scan all pending sequences too. */
4021 for (; stack; stack = stack->next)
4022 if (before == stack->first)
4024 stack->first = insn;
4025 break;
4028 gcc_assert (stack);
4033 /* Like add_insn_after_nobb, but try to set BLOCK_FOR_INSN.
4034 If BB is NULL, an attempt is made to infer the bb from AFTER.
4036 This and the next function should be the only functions called
4037 to insert an insn once delay slots have been filled since only
4038 they know how to update a SEQUENCE. */
4040 void
4041 add_insn_after (rtx uncast_insn, rtx uncast_after, basic_block bb)
4043 rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
4044 rtx_insn *after = as_a <rtx_insn *> (uncast_after);
4045 add_insn_after_nobb (insn, after);
4046 if (!BARRIER_P (after)
4047 && !BARRIER_P (insn)
4048 && (bb = BLOCK_FOR_INSN (after)))
4050 set_block_for_insn (insn, bb);
4051 if (INSN_P (insn))
4052 df_insn_rescan (insn);
4053 /* Should not happen as the first insn in the BB is always
4054 either a NOTE or a LABEL. */
4055 if (BB_END (bb) == after
4056 /* Avoid clobbering of structure when creating new BB. */
4057 && !BARRIER_P (insn)
4058 && !NOTE_INSN_BASIC_BLOCK_P (insn))
4059 BB_END (bb) = insn;
4063 /* Like add_insn_before_nobb, but try to set BLOCK_FOR_INSN.
4064 If BB is NULL, an attempt is made to infer the bb from BEFORE.
4066 This and the previous function should be the only functions called
4067 to insert an insn once delay slots have been filled since only
4068 they know how to update a SEQUENCE. */
4070 void
4071 add_insn_before (rtx uncast_insn, rtx uncast_before, basic_block bb)
4073 rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
4074 rtx_insn *before = as_a <rtx_insn *> (uncast_before);
4075 add_insn_before_nobb (insn, before);
4077 if (!bb
4078 && !BARRIER_P (before)
4079 && !BARRIER_P (insn))
4080 bb = BLOCK_FOR_INSN (before);
4082 if (bb)
4084 set_block_for_insn (insn, bb);
4085 if (INSN_P (insn))
4086 df_insn_rescan (insn);
4087 /* Should not happen as the first insn in the BB is always either a NOTE
4088 or a LABEL. */
4089 gcc_assert (BB_HEAD (bb) != insn
4090 /* Avoid clobbering of structure when creating new BB. */
4091 || BARRIER_P (insn)
4092 || NOTE_INSN_BASIC_BLOCK_P (insn));
4096 /* Replace INSN with a deleted instruction note. */
4098 void
4099 set_insn_deleted (rtx insn)
4101 if (INSN_P (insn))
4102 df_insn_delete (as_a <rtx_insn *> (insn));
4103 PUT_CODE (insn, NOTE);
4104 NOTE_KIND (insn) = NOTE_INSN_DELETED;
4108 /* Unlink INSN from the insn chain.
4110 This function knows how to handle sequences.
4112 This function does not invalidate data flow information associated with
4113 INSN (i.e. it does not call df_insn_delete). That makes this function
4114 suitable for merely disconnecting an insn from the chain so that it can
4115 be re-emitted elsewhere later.
4117 To later insert INSN elsewhere in the insn chain via add_insn and
4118 similar functions, PREV_INSN and NEXT_INSN must be nullified by
4119 the caller. Nullifying them here breaks many insn chain walks.
4121 To really delete an insn and related DF information, use delete_insn. */
4123 void
4124 remove_insn (rtx uncast_insn)
4126 rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
4127 rtx_insn *next = NEXT_INSN (insn);
4128 rtx_insn *prev = PREV_INSN (insn);
4129 basic_block bb;
4131 if (prev)
4133 SET_NEXT_INSN (prev) = next;
4134 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
4136 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
4137 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
4140 else if (get_insns () == insn)
4142 if (next)
4143 SET_PREV_INSN (next) = NULL;
4144 set_first_insn (next);
4146 else
4148 struct sequence_stack *stack = seq_stack;
4149 /* Scan all pending sequences too. */
4150 for (; stack; stack = stack->next)
4151 if (insn == stack->first)
4153 stack->first = next;
4154 break;
4157 gcc_assert (stack);
4160 if (next)
4162 SET_PREV_INSN (next) = prev;
4163 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
4165 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
4166 SET_PREV_INSN (sequence->insn (0)) = prev;
4169 else if (get_last_insn () == insn)
4170 set_last_insn (prev);
4171 else
4173 struct sequence_stack *stack = seq_stack;
4174 /* Scan all pending sequences too. */
4175 for (; stack; stack = stack->next)
4176 if (insn == stack->last)
4178 stack->last = prev;
4179 break;
4182 gcc_assert (stack);
4185 /* Fix up basic block boundaries, if necessary. */
4186 if (!BARRIER_P (insn)
4187 && (bb = BLOCK_FOR_INSN (insn)))
4189 if (BB_HEAD (bb) == insn)
4191 /* Never delete the basic block note without deleting the whole
4192 basic block. */
4193 gcc_assert (!NOTE_P (insn));
4194 BB_HEAD (bb) = next;
4196 if (BB_END (bb) == insn)
4197 BB_END (bb) = prev;
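/* A minimal illustrative sketch of the detach-and-re-emit discipline
   described above remove_insn: the caller nullifies the chain pointers
   before handing the insn back to add_insn_after.  The function name
   example_move_insn_after is hypothetical, not part of this API.  */

static void
example_move_insn_after (rtx_insn *insn, rtx_insn *after)
{
  /* Unlink INSN; its DF information is deliberately kept.  */
  remove_insn (insn);

  /* Nullify the chain pointers, as required before re-insertion.  */
  SET_PREV_INSN (insn) = NULL;
  SET_NEXT_INSN (insn) = NULL;

  /* Splice it back in after AFTER; the bb is inferred from AFTER.  */
  add_insn_after (insn, after, NULL);
}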
4201 /* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
4203 void
4204 add_function_usage_to (rtx call_insn, rtx call_fusage)
4206 gcc_assert (call_insn && CALL_P (call_insn));
4208 /* Put the register usage information on the CALL. If there is already
4209 some usage information, put ours at the end. */
4210 if (CALL_INSN_FUNCTION_USAGE (call_insn))
4212 rtx link;
4214 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
4215 link = XEXP (link, 1))
4218 XEXP (link, 1) = call_fusage;
4220 else
4221 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
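/* A minimal sketch of attaching register-usage information to a call:
   build a one-element EXPR_LIST holding a USE of REG and append it with
   add_function_usage_to.  The helper name example_record_call_use is
   hypothetical.  */

static void
example_record_call_use (rtx call_insn, rtx reg)
{
  rtx fusage = gen_rtx_EXPR_LIST (VOIDmode,
                                  gen_rtx_USE (VOIDmode, reg),
                                  NULL_RTX);
  add_function_usage_to (call_insn, fusage);
}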
4224 /* Delete all insns made since FROM.
4225 FROM becomes the new last instruction. */
4227 void
4228 delete_insns_since (rtx_insn *from)
4230 if (from == 0)
4231 set_first_insn (0);
4232 else
4233 SET_NEXT_INSN (from) = 0;
4234 set_last_insn (from);
4237 /* This function is deprecated; please use sequences instead.
4239 Move a consecutive group of insns to a different place in the chain.
4240 The insns to be moved are those between FROM and TO.
4241 They are moved to a new position after the insn AFTER.
4242 AFTER must not be FROM or TO or any insn in between.
4244 This function does not know about SEQUENCEs and hence should not be
4245 called after delay-slot filling has been done. */
4247 void
4248 reorder_insns_nobb (rtx_insn *from, rtx_insn *to, rtx_insn *after)
4250 #ifdef ENABLE_CHECKING
4251 rtx_insn *x;
4252 for (x = from; x != to; x = NEXT_INSN (x))
4253 gcc_assert (after != x);
4254 gcc_assert (after != to);
4255 #endif
4257 /* Splice this bunch out of where it is now. */
4258 if (PREV_INSN (from))
4259 SET_NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
4260 if (NEXT_INSN (to))
4261 SET_PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
4262 if (get_last_insn () == to)
4263 set_last_insn (PREV_INSN (from));
4264 if (get_insns () == from)
4265 set_first_insn (NEXT_INSN (to));
4267 /* Make the new neighbors point to it and it to them. */
4268 if (NEXT_INSN (after))
4269 SET_PREV_INSN (NEXT_INSN (after)) = to;
4271 SET_NEXT_INSN (to) = NEXT_INSN (after);
4272 SET_PREV_INSN (from) = after;
4273 SET_NEXT_INSN (after) = from;
4274 if (after == get_last_insn ())
4275 set_last_insn (to);
4278 /* Same as the function above, but also takes care to update BB boundaries. */
4279 void
4280 reorder_insns (rtx_insn *from, rtx_insn *to, rtx_insn *after)
4282 rtx_insn *prev = PREV_INSN (from);
4283 basic_block bb, bb2;
4285 reorder_insns_nobb (from, to, after);
4287 if (!BARRIER_P (after)
4288 && (bb = BLOCK_FOR_INSN (after)))
4290 rtx_insn *x;
4291 df_set_bb_dirty (bb);
4293 if (!BARRIER_P (from)
4294 && (bb2 = BLOCK_FOR_INSN (from)))
4296 if (BB_END (bb2) == to)
4297 BB_END (bb2) = prev;
4298 df_set_bb_dirty (bb2);
4301 if (BB_END (bb) == after)
4302 BB_END (bb) = to;
4304 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
4305 if (!BARRIER_P (x))
4306 df_insn_change_bb (x, bb);
4311 /* Emit insn(s) of given code and pattern
4312 at a specified place within the doubly-linked list.
4314 All of the emit_foo global entry points accept an object
4315 X which is either an insn list or a PATTERN of a single
4316 instruction.
4318 There are thus a few canonical ways to generate code and
4319 emit it at a specific place in the instruction stream. For
4320 example, consider the instruction named SPOT and the fact that
4321 we would like to emit some instructions before SPOT. We might
4322 do it like this:
4324 start_sequence ();
4325 ... emit the new instructions ...
4326 insns_head = get_insns ();
4327 end_sequence ();
4329 emit_insn_before (insns_head, SPOT);
4331 It used to be common to generate SEQUENCE rtl instead, but that
4332 is a relic of the past that no longer occurs. The reason is that
4333 SEQUENCE rtl results in badly fragmented RTL memory, since the generated
4334 SEQUENCE would almost certainly die right after it was created. */
4336 static rtx_insn *
4337 emit_pattern_before_noloc (rtx x, rtx before, rtx last, basic_block bb,
4338 rtx_insn *(*make_raw) (rtx))
4340 rtx_insn *insn;
4342 gcc_assert (before);
4344 if (x == NULL_RTX)
4345 return safe_as_a <rtx_insn *> (last);
4347 switch (GET_CODE (x))
4349 case DEBUG_INSN:
4350 case INSN:
4351 case JUMP_INSN:
4352 case CALL_INSN:
4353 case CODE_LABEL:
4354 case BARRIER:
4355 case NOTE:
4356 insn = as_a <rtx_insn *> (x);
4357 while (insn)
4359 rtx_insn *next = NEXT_INSN (insn);
4360 add_insn_before (insn, before, bb);
4361 last = insn;
4362 insn = next;
4364 break;
4366 #ifdef ENABLE_RTL_CHECKING
4367 case SEQUENCE:
4368 gcc_unreachable ();
4369 break;
4370 #endif
4372 default:
4373 last = (*make_raw) (x);
4374 add_insn_before (last, before, bb);
4375 break;
4378 return safe_as_a <rtx_insn *> (last);
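/* A minimal sketch of the canonical emission pattern described in the
   comment above: build the new insns inside a sequence, then splice the
   whole list in before SPOT.  The names example_emit_set_before, SPOT,
   DEST and SRC are hypothetical; the three-operand gen_rtx_SET form is
   assumed, matching this source tree.  */

static void
example_emit_set_before (rtx_insn *spot, rtx dest, rtx src)
{
  rtx_insn *insns_head;

  start_sequence ();
  emit_insn (gen_rtx_SET (VOIDmode, dest, src));
  insns_head = get_insns ();
  end_sequence ();

  emit_insn_before (insns_head, spot);
}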
4381 /* Make X be output before the instruction BEFORE. */
4383 rtx_insn *
4384 emit_insn_before_noloc (rtx x, rtx_insn *before, basic_block bb)
4386 return emit_pattern_before_noloc (x, before, before, bb, make_insn_raw);
4389 /* Make an instruction with body X and code JUMP_INSN
4390 and output it before the instruction BEFORE. */
4392 rtx_insn *
4393 emit_jump_insn_before_noloc (rtx x, rtx_insn *before)
4395 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4396 make_jump_insn_raw);
4399 /* Make an instruction with body X and code CALL_INSN
4400 and output it before the instruction BEFORE. */
4402 rtx_insn *
4403 emit_call_insn_before_noloc (rtx x, rtx_insn *before)
4405 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4406 make_call_insn_raw);
4409 /* Make an instruction with body X and code DEBUG_INSN
4410 and output it before the instruction BEFORE. */
4412 rtx_insn *
4413 emit_debug_insn_before_noloc (rtx x, rtx before)
4415 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4416 make_debug_insn_raw);
4419 /* Make an insn of code BARRIER
4420 and output it before the insn BEFORE. */
4422 rtx_barrier *
4423 emit_barrier_before (rtx before)
4425 rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
4427 INSN_UID (insn) = cur_insn_uid++;
4429 add_insn_before (insn, before, NULL);
4430 return insn;
4433 /* Emit the label LABEL before the insn BEFORE. */
4435 rtx_insn *
4436 emit_label_before (rtx label, rtx_insn *before)
4438 gcc_checking_assert (INSN_UID (label) == 0);
4439 INSN_UID (label) = cur_insn_uid++;
4440 add_insn_before (label, before, NULL);
4441 return as_a <rtx_insn *> (label);
4444 /* Helper for emit_insn_after, handles lists of instructions
4445 efficiently. */
4447 static rtx_insn *
4448 emit_insn_after_1 (rtx_insn *first, rtx uncast_after, basic_block bb)
4450 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4451 rtx_insn *last;
4452 rtx_insn *after_after;
4453 if (!bb && !BARRIER_P (after))
4454 bb = BLOCK_FOR_INSN (after);
4456 if (bb)
4458 df_set_bb_dirty (bb);
4459 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4460 if (!BARRIER_P (last))
4462 set_block_for_insn (last, bb);
4463 df_insn_rescan (last);
4465 if (!BARRIER_P (last))
4467 set_block_for_insn (last, bb);
4468 df_insn_rescan (last);
4470 if (BB_END (bb) == after)
4471 BB_END (bb) = last;
4473 else
4474 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4475 continue;
4477 after_after = NEXT_INSN (after);
4479 SET_NEXT_INSN (after) = first;
4480 SET_PREV_INSN (first) = after;
4481 SET_NEXT_INSN (last) = after_after;
4482 if (after_after)
4483 SET_PREV_INSN (after_after) = last;
4485 if (after == get_last_insn ())
4486 set_last_insn (last);
4488 return last;
4491 static rtx_insn *
4492 emit_pattern_after_noloc (rtx x, rtx uncast_after, basic_block bb,
4493 rtx_insn *(*make_raw)(rtx))
4495 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4496 rtx_insn *last = after;
4498 gcc_assert (after);
4500 if (x == NULL_RTX)
4501 return last;
4503 switch (GET_CODE (x))
4505 case DEBUG_INSN:
4506 case INSN:
4507 case JUMP_INSN:
4508 case CALL_INSN:
4509 case CODE_LABEL:
4510 case BARRIER:
4511 case NOTE:
4512 last = emit_insn_after_1 (as_a <rtx_insn *> (x), after, bb);
4513 break;
4515 #ifdef ENABLE_RTL_CHECKING
4516 case SEQUENCE:
4517 gcc_unreachable ();
4518 break;
4519 #endif
4521 default:
4522 last = (*make_raw) (x);
4523 add_insn_after (last, after, bb);
4524 break;
4527 return last;
4530 /* Make X be output after the insn AFTER and set the BB of insn. If
4531 BB is NULL, an attempt is made to infer the BB from AFTER. */
4533 rtx_insn *
4534 emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
4536 return emit_pattern_after_noloc (x, after, bb, make_insn_raw);
4540 /* Make an insn of code JUMP_INSN with body X
4541 and output it after the insn AFTER. */
4543 rtx_insn *
4544 emit_jump_insn_after_noloc (rtx x, rtx after)
4546 return emit_pattern_after_noloc (x, after, NULL, make_jump_insn_raw);
4549 /* Make an instruction with body X and code CALL_INSN
4550 and output it after the instruction AFTER. */
4552 rtx_insn *
4553 emit_call_insn_after_noloc (rtx x, rtx after)
4555 return emit_pattern_after_noloc (x, after, NULL, make_call_insn_raw);
4558 /* Make an instruction with body X and code DEBUG_INSN
4559 and output it after the instruction AFTER. */
4561 rtx_insn *
4562 emit_debug_insn_after_noloc (rtx x, rtx after)
4564 return emit_pattern_after_noloc (x, after, NULL, make_debug_insn_raw);
4567 /* Make an insn of code BARRIER
4568 and output it after the insn AFTER. */
4570 rtx_barrier *
4571 emit_barrier_after (rtx after)
4573 rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
4575 INSN_UID (insn) = cur_insn_uid++;
4577 add_insn_after (insn, after, NULL);
4578 return insn;
4581 /* Emit the label LABEL after the insn AFTER. */
4583 rtx_insn *
4584 emit_label_after (rtx label, rtx_insn *after)
4586 gcc_checking_assert (INSN_UID (label) == 0);
4587 INSN_UID (label) = cur_insn_uid++;
4588 add_insn_after (label, after, NULL);
4589 return as_a <rtx_insn *> (label);
4592 /* Notes require a bit of special handling: Some notes need to have their
4593 BLOCK_FOR_INSN set, others should never have it set, and some should
4594 have it set or clear depending on the context. */
4596 /* Return true iff a note of kind SUBTYPE should be emitted with routines
4597 that never set BLOCK_FOR_INSN on NOTE. ON_BB_BOUNDARY_P is true if the
4598 caller is asked to emit a note before BB_HEAD, or after BB_END. */
4600 static bool
4601 note_outside_basic_block_p (enum insn_note subtype, bool on_bb_boundary_p)
4603 switch (subtype)
4605 /* NOTE_INSN_SWITCH_TEXT_SECTIONS only appears between basic blocks. */
4606 case NOTE_INSN_SWITCH_TEXT_SECTIONS:
4607 return true;
4609 /* Notes for var tracking and EH region markers can appear between or
4610 inside basic blocks. If the caller is emitting on the basic block
4611 boundary, do not set BLOCK_FOR_INSN on the new note. */
4612 case NOTE_INSN_VAR_LOCATION:
4613 case NOTE_INSN_CALL_ARG_LOCATION:
4614 case NOTE_INSN_EH_REGION_BEG:
4615 case NOTE_INSN_EH_REGION_END:
4616 return on_bb_boundary_p;
4618 /* Otherwise, BLOCK_FOR_INSN must be set. */
4619 default:
4620 return false;
4624 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
4626 rtx_note *
4627 emit_note_after (enum insn_note subtype, rtx uncast_after)
4629 rtx_insn *after = as_a <rtx_insn *> (uncast_after);
4630 rtx_note *note = make_note_raw (subtype);
4631 basic_block bb = BARRIER_P (after) ? NULL : BLOCK_FOR_INSN (after);
4632 bool on_bb_boundary_p = (bb != NULL && BB_END (bb) == after);
4634 if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4635 add_insn_after_nobb (note, after);
4636 else
4637 add_insn_after (note, after, bb);
4638 return note;
4641 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4643 rtx_note *
4644 emit_note_before (enum insn_note subtype, rtx uncast_before)
4646 rtx_insn *before = as_a <rtx_insn *> (uncast_before);
4647 rtx_note *note = make_note_raw (subtype);
4648 basic_block bb = BARRIER_P (before) ? NULL : BLOCK_FOR_INSN (before);
4649 bool on_bb_boundary_p = (bb != NULL && BB_HEAD (bb) == before);
4651 if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4652 add_insn_before_nobb (note, before);
4653 else
4654 add_insn_before (note, before, bb);
4655 return note;
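/* A minimal sketch, assuming INSN lies inside a basic block: bracket it
   with EH region notes.  emit_note_before/emit_note_after consult
   note_outside_basic_block_p, so the caller never sets BLOCK_FOR_INSN by
   hand.  The region-number bookkeeping (NOTE_EH_HANDLER) is omitted here,
   and example_bracket_with_eh_notes is a hypothetical name.  */

static void
example_bracket_with_eh_notes (rtx_insn *insn)
{
  emit_note_before (NOTE_INSN_EH_REGION_BEG, insn);
  emit_note_after (NOTE_INSN_EH_REGION_END, insn);
}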
4658 /* Insert PATTERN after AFTER, setting its INSN_LOCATION to LOC.
4659 MAKE_RAW indicates how to turn PATTERN into a real insn. */
4661 static rtx_insn *
4662 emit_pattern_after_setloc (rtx pattern, rtx uncast_after, int loc,
4663 rtx_insn *(*make_raw) (rtx))
4665 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4666 rtx last = emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4668 if (pattern == NULL_RTX || !loc)
4669 return safe_as_a <rtx_insn *> (last);
4671 after = NEXT_INSN (after);
4672 while (1)
4674 if (active_insn_p (after) && !INSN_LOCATION (after))
4675 INSN_LOCATION (after) = loc;
4676 if (after == last)
4677 break;
4678 after = NEXT_INSN (after);
4680 return safe_as_a <rtx_insn *> (last);
4683 /* Insert PATTERN after AFTER. MAKE_RAW indicates how to turn PATTERN
4684 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert after
4685 any DEBUG_INSNs. */
4687 static rtx_insn *
4688 emit_pattern_after (rtx pattern, rtx uncast_after, bool skip_debug_insns,
4689 rtx_insn *(*make_raw) (rtx))
4691 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4692 rtx_insn *prev = after;
4694 if (skip_debug_insns)
4695 while (DEBUG_INSN_P (prev))
4696 prev = PREV_INSN (prev);
4698 if (INSN_P (prev))
4699 return emit_pattern_after_setloc (pattern, after, INSN_LOCATION (prev),
4700 make_raw);
4701 else
4702 return emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4705 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4706 rtx_insn *
4707 emit_insn_after_setloc (rtx pattern, rtx after, int loc)
4709 return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw);
4712 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4713 rtx_insn *
4714 emit_insn_after (rtx pattern, rtx after)
4716 return emit_pattern_after (pattern, after, true, make_insn_raw);
4719 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4720 rtx_insn *
4721 emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
4723 return emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw);
4726 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4727 rtx_insn *
4728 emit_jump_insn_after (rtx pattern, rtx after)
4730 return emit_pattern_after (pattern, after, true, make_jump_insn_raw);
4733 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4734 rtx_insn *
4735 emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
4737 return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw);
4740 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4741 rtx_insn *
4742 emit_call_insn_after (rtx pattern, rtx after)
4744 return emit_pattern_after (pattern, after, true, make_call_insn_raw);
4747 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4748 rtx_insn *
4749 emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc)
4751 return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw);
4754 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4755 rtx_insn *
4756 emit_debug_insn_after (rtx pattern, rtx after)
4758 return emit_pattern_after (pattern, after, false, make_debug_insn_raw);
4761 /* Insert PATTERN before BEFORE, setting its INSN_LOCATION to LOC.
4762 MAKE_RAW indicates how to turn PATTERN into a real insn. INSNP
4763 indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN,
4764 CALL_INSN, etc. */
4766 static rtx_insn *
4767 emit_pattern_before_setloc (rtx pattern, rtx uncast_before, int loc, bool insnp,
4768 rtx_insn *(*make_raw) (rtx))
4770 rtx_insn *before = as_a <rtx_insn *> (uncast_before);
4771 rtx_insn *first = PREV_INSN (before);
4772 rtx_insn *last = emit_pattern_before_noloc (pattern, before,
4773 insnp ? before : NULL_RTX,
4774 NULL, make_raw);
4776 if (pattern == NULL_RTX || !loc)
4777 return last;
4779 if (!first)
4780 first = get_insns ();
4781 else
4782 first = NEXT_INSN (first);
4783 while (1)
4785 if (active_insn_p (first) && !INSN_LOCATION (first))
4786 INSN_LOCATION (first) = loc;
4787 if (first == last)
4788 break;
4789 first = NEXT_INSN (first);
4791 return last;
4794 /* Insert PATTERN before BEFORE. MAKE_RAW indicates how to turn PATTERN
4795 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert
4796 before any DEBUG_INSNs. INSNP indicates if PATTERN is meant for an
4797 INSN as opposed to a JUMP_INSN, CALL_INSN, etc. */
4799 static rtx_insn *
4800 emit_pattern_before (rtx pattern, rtx uncast_before, bool skip_debug_insns,
4801 bool insnp, rtx_insn *(*make_raw) (rtx))
4803 rtx_insn *before = safe_as_a <rtx_insn *> (uncast_before);
4804 rtx_insn *next = before;
4806 if (skip_debug_insns)
4807 while (DEBUG_INSN_P (next))
4808 next = PREV_INSN (next);
4810 if (INSN_P (next))
4811 return emit_pattern_before_setloc (pattern, before, INSN_LOCATION (next),
4812 insnp, make_raw);
4813 else
4814 return emit_pattern_before_noloc (pattern, before,
4815 insnp ? before : NULL_RTX,
4816 NULL, make_raw);
4819 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4820 rtx_insn *
4821 emit_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
4823 return emit_pattern_before_setloc (pattern, before, loc, true,
4824 make_insn_raw);
4827 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
4828 rtx_insn *
4829 emit_insn_before (rtx pattern, rtx before)
4831 return emit_pattern_before (pattern, before, true, true, make_insn_raw);
4834 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4835 rtx_insn *
4836 emit_jump_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
4838 return emit_pattern_before_setloc (pattern, before, loc, false,
4839 make_jump_insn_raw);
4842 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
4843 rtx_insn *
4844 emit_jump_insn_before (rtx pattern, rtx before)
4846 return emit_pattern_before (pattern, before, true, false,
4847 make_jump_insn_raw);
4850 /* Like emit_call_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4851 rtx_insn *
4852 emit_call_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
4854 return emit_pattern_before_setloc (pattern, before, loc, false,
4855 make_call_insn_raw);
4858 /* Like emit_call_insn_before_noloc,
4859 but set INSN_LOCATION according to BEFORE. */
4860 rtx_insn *
4861 emit_call_insn_before (rtx pattern, rtx_insn *before)
4863 return emit_pattern_before (pattern, before, true, false,
4864 make_call_insn_raw);
4867 /* Like emit_debug_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4868 rtx_insn *
4869 emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc)
4871 return emit_pattern_before_setloc (pattern, before, loc, false,
4872 make_debug_insn_raw);
4875 /* Like emit_debug_insn_before_noloc,
4876 but set INSN_LOCATION according to BEFORE. */
4877 rtx_insn *
4878 emit_debug_insn_before (rtx pattern, rtx before)
4880 return emit_pattern_before (pattern, before, false, false,
4881 make_debug_insn_raw);
4884 /* Take X and emit it at the end of the doubly-linked
4885 INSN list.
4887 Returns the last insn emitted. */
4889 rtx_insn *
4890 emit_insn (rtx x)
4892 rtx_insn *last = get_last_insn ();
4893 rtx_insn *insn;
4895 if (x == NULL_RTX)
4896 return last;
4898 switch (GET_CODE (x))
4900 case DEBUG_INSN:
4901 case INSN:
4902 case JUMP_INSN:
4903 case CALL_INSN:
4904 case CODE_LABEL:
4905 case BARRIER:
4906 case NOTE:
4907 insn = as_a <rtx_insn *> (x);
4908 while (insn)
4910 rtx_insn *next = NEXT_INSN (insn);
4911 add_insn (insn);
4912 last = insn;
4913 insn = next;
4915 break;
4917 #ifdef ENABLE_RTL_CHECKING
4918 case JUMP_TABLE_DATA:
4919 case SEQUENCE:
4920 gcc_unreachable ();
4921 break;
4922 #endif
4924 default:
4925 last = make_insn_raw (x);
4926 add_insn (last);
4927 break;
4930 return last;
4933 /* Make an insn of code DEBUG_INSN with pattern X
4934 and add it to the end of the doubly-linked list. */
4936 rtx_insn *
4937 emit_debug_insn (rtx x)
4939 rtx_insn *last = get_last_insn ();
4940 rtx_insn *insn;
4942 if (x == NULL_RTX)
4943 return last;
4945 switch (GET_CODE (x))
4947 case DEBUG_INSN:
4948 case INSN:
4949 case JUMP_INSN:
4950 case CALL_INSN:
4951 case CODE_LABEL:
4952 case BARRIER:
4953 case NOTE:
4954 insn = as_a <rtx_insn *> (x);
4955 while (insn)
4957 rtx_insn *next = NEXT_INSN (insn);
4958 add_insn (insn);
4959 last = insn;
4960 insn = next;
4962 break;
4964 #ifdef ENABLE_RTL_CHECKING
4965 case JUMP_TABLE_DATA:
4966 case SEQUENCE:
4967 gcc_unreachable ();
4968 break;
4969 #endif
4971 default:
4972 last = make_debug_insn_raw (x);
4973 add_insn (last);
4974 break;
4977 return last;
4980 /* Make an insn of code JUMP_INSN with pattern X
4981 and add it to the end of the doubly-linked list. */
4983 rtx_insn *
4984 emit_jump_insn (rtx x)
4986 rtx_insn *last = NULL;
4987 rtx_insn *insn;
4989 switch (GET_CODE (x))
4991 case DEBUG_INSN:
4992 case INSN:
4993 case JUMP_INSN:
4994 case CALL_INSN:
4995 case CODE_LABEL:
4996 case BARRIER:
4997 case NOTE:
4998 insn = as_a <rtx_insn *> (x);
4999 while (insn)
5001 rtx_insn *next = NEXT_INSN (insn);
5002 add_insn (insn);
5003 last = insn;
5004 insn = next;
5006 break;
5008 #ifdef ENABLE_RTL_CHECKING
5009 case JUMP_TABLE_DATA:
5010 case SEQUENCE:
5011 gcc_unreachable ();
5012 break;
5013 #endif
5015 default:
5016 last = make_jump_insn_raw (x);
5017 add_insn (last);
5018 break;
5021 return last;
5024 /* Make an insn of code CALL_INSN with pattern X
5025 and add it to the end of the doubly-linked list. */
5027 rtx_insn *
5028 emit_call_insn (rtx x)
5030 rtx_insn *insn;
5032 switch (GET_CODE (x))
5034 case DEBUG_INSN:
5035 case INSN:
5036 case JUMP_INSN:
5037 case CALL_INSN:
5038 case CODE_LABEL:
5039 case BARRIER:
5040 case NOTE:
5041 insn = emit_insn (x);
5042 break;
5044 #ifdef ENABLE_RTL_CHECKING
5045 case SEQUENCE:
5046 case JUMP_TABLE_DATA:
5047 gcc_unreachable ();
5048 break;
5049 #endif
5051 default:
5052 insn = make_call_insn_raw (x);
5053 add_insn (insn);
5054 break;
5057 return insn;
5060 /* Add the label LABEL to the end of the doubly-linked list. */
5062 rtx_insn *
5063 emit_label (rtx label)
5065 gcc_checking_assert (INSN_UID (label) == 0);
5066 INSN_UID (label) = cur_insn_uid++;
5067 add_insn (as_a <rtx_insn *> (label));
5068 return as_a <rtx_insn *> (label);
5071 /* Make an insn of code JUMP_TABLE_DATA
5072 and add it to the end of the doubly-linked list. */
5074 rtx_jump_table_data *
5075 emit_jump_table_data (rtx table)
5077 rtx_jump_table_data *jump_table_data =
5078 as_a <rtx_jump_table_data *> (rtx_alloc (JUMP_TABLE_DATA));
5079 INSN_UID (jump_table_data) = cur_insn_uid++;
5080 PATTERN (jump_table_data) = table;
5081 BLOCK_FOR_INSN (jump_table_data) = NULL;
5082 add_insn (jump_table_data);
5083 return jump_table_data;
5086 /* Make an insn of code BARRIER
5087 and add it to the end of the doubly-linked list. */
5089 rtx_barrier *
5090 emit_barrier (void)
5092 rtx_barrier *barrier = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
5093 INSN_UID (barrier) = cur_insn_uid++;
5094 add_insn (barrier);
5095 return barrier;
5098 /* Emit a copy of note ORIG. */
5100 rtx_note *
5101 emit_note_copy (rtx_note *orig)
5103 enum insn_note kind = (enum insn_note) NOTE_KIND (orig);
5104 rtx_note *note = make_note_raw (kind);
5105 NOTE_DATA (note) = NOTE_DATA (orig);
5106 add_insn (note);
5107 return note;
5110 /* Make a note insn of kind KIND
5111 and add it to the end of the doubly-linked list. */
5113 rtx_note *
5114 emit_note (enum insn_note kind)
5116 rtx_note *note = make_note_raw (kind);
5117 add_insn (note);
5118 return note;
5121 /* Emit a clobber of lvalue X. */
5123 rtx_insn *
5124 emit_clobber (rtx x)
5126 /* CONCATs should not appear in the insn stream. */
5127 if (GET_CODE (x) == CONCAT)
5129 emit_clobber (XEXP (x, 0));
5130 return emit_clobber (XEXP (x, 1));
5132 return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
5135 /* Return a sequence of insns to clobber lvalue X. */
5137 rtx_insn *
5138 gen_clobber (rtx x)
5140 rtx_insn *seq;
5142 start_sequence ();
5143 emit_clobber (x);
5144 seq = get_insns ();
5145 end_sequence ();
5146 return seq;
5149 /* Emit a use of rvalue X. */
5151 rtx_insn *
5152 emit_use (rtx x)
5154 /* CONCATs should not appear in the insn stream. */
5155 if (GET_CODE (x) == CONCAT)
5157 emit_use (XEXP (x, 0));
5158 return emit_use (XEXP (x, 1));
5160 return emit_insn (gen_rtx_USE (VOIDmode, x));
5163 /* Return a sequence of insns to use rvalue X. */
5165 rtx_insn *
5166 gen_use (rtx x)
5168 rtx_insn *seq;
5170 start_sequence ();
5171 emit_use (x);
5172 seq = get_insns ();
5173 end_sequence ();
5174 return seq;
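/* A minimal sketch of the sequence-returning variant: generate a USE of
   REG as a detached insn list and place it just before LAST, e.g. to keep
   REG live up to that point.  example_keep_reg_live is a hypothetical
   name.  */

static void
example_keep_reg_live (rtx reg, rtx_insn *last)
{
  rtx_insn *seq = gen_use (reg);
  emit_insn_before (seq, last);
}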
5177 /* Notes like REG_EQUAL and REG_EQUIV refer to a set in an instruction.
5178 Return the set in INSN that such notes describe, or NULL if the notes
5179 have no meaning for INSN. */
5181 rtx
5182 set_for_reg_notes (rtx insn)
5184 rtx pat, reg;
5186 if (!INSN_P (insn))
5187 return NULL_RTX;
5189 pat = PATTERN (insn);
5190 if (GET_CODE (pat) == PARALLEL)
5192 /* We do not use single_set because that ignores SETs of unused
5193 registers. REG_EQUAL and REG_EQUIV notes really do require the
5194 PARALLEL to have a single SET. */
5195 if (multiple_sets (insn))
5196 return NULL_RTX;
5197 pat = XVECEXP (pat, 0, 0);
5200 if (GET_CODE (pat) != SET)
5201 return NULL_RTX;
5203 reg = SET_DEST (pat);
5205 /* Notes apply to the contents of a STRICT_LOW_PART. */
5206 if (GET_CODE (reg) == STRICT_LOW_PART)
5207 reg = XEXP (reg, 0);
5209 /* Check that we have a register. */
5210 if (!(REG_P (reg) || GET_CODE (reg) == SUBREG))
5211 return NULL_RTX;
5213 return pat;
5216 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
5217 note of this type already exists, remove it first. */
5219 rtx
5220 set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
5222 rtx note = find_reg_note (insn, kind, NULL_RTX);
5224 switch (kind)
5226 case REG_EQUAL:
5227 case REG_EQUIV:
5228 if (!set_for_reg_notes (insn))
5229 return NULL_RTX;
5231 /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
5232 It serves no useful purpose and breaks eliminate_regs. */
5233 if (GET_CODE (datum) == ASM_OPERANDS)
5234 return NULL_RTX;
5236 /* Notes with side effects are dangerous. Even if the side-effect
5237 initially mirrors one in PATTERN (INSN), later optimizations
5238 might alter the way that the final register value is calculated
5239 and so move or alter the side-effect in some way. The note would
5240 then no longer be a valid substitution for SET_SRC. */
5241 if (side_effects_p (datum))
5242 return NULL_RTX;
5243 break;
5245 default:
5246 break;
5249 if (note)
5250 XEXP (note, 0) = datum;
5251 else
5253 add_reg_note (insn, kind, datum);
5254 note = REG_NOTES (insn);
5257 switch (kind)
5259 case REG_EQUAL:
5260 case REG_EQUIV:
5261 df_notes_rescan (as_a <rtx_insn *> (insn));
5262 break;
5263 default:
5264 break;
5267 return note;
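/* A minimal sketch, assuming INSN is a single-set insn: record the value
   SRC1 + SRC2 of its destination as a REG_EQUAL note so that later passes
   may substitute it.  example_note_sum is a hypothetical name.  */

static void
example_note_sum (rtx insn, rtx src1, rtx src2)
{
  rtx set = set_for_reg_notes (insn);
  if (set != NULL_RTX)
    set_unique_reg_note (insn, REG_EQUAL,
                         gen_rtx_PLUS (GET_MODE (SET_DEST (set)),
                                       src1, src2));
}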
5270 /* Like set_unique_reg_note, but don't do anything unless INSN sets DST. */
5271 rtx
5272 set_dst_reg_note (rtx insn, enum reg_note kind, rtx datum, rtx dst)
5274 rtx set = set_for_reg_notes (insn);
5276 if (set && SET_DEST (set) == dst)
5277 return set_unique_reg_note (insn, kind, datum);
5278 return NULL_RTX;
5281 /* Return an indication of which type of insn should have X as a body.
5282 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
5284 static enum rtx_code
5285 classify_insn (rtx x)
5287 if (LABEL_P (x))
5288 return CODE_LABEL;
5289 if (GET_CODE (x) == CALL)
5290 return CALL_INSN;
5291 if (ANY_RETURN_P (x))
5292 return JUMP_INSN;
5293 if (GET_CODE (x) == SET)
5295 if (SET_DEST (x) == pc_rtx)
5296 return JUMP_INSN;
5297 else if (GET_CODE (SET_SRC (x)) == CALL)
5298 return CALL_INSN;
5299 else
5300 return INSN;
5302 if (GET_CODE (x) == PARALLEL)
5304 int j;
5305 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
5306 if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
5307 return CALL_INSN;
5308 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
5309 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
5310 return JUMP_INSN;
5311 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
5312 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
5313 return CALL_INSN;
5315 return INSN;
5318 /* Emit the rtl pattern X as an appropriate kind of insn.
5319 If X is a label, it is simply added into the insn chain. */
5321 rtx_insn *
5322 emit (rtx x)
5324 enum rtx_code code = classify_insn (x);
5326 switch (code)
5328 case CODE_LABEL:
5329 return emit_label (x);
5330 case INSN:
5331 return emit_insn (x);
5332 case JUMP_INSN:
5334 rtx_insn *insn = emit_jump_insn (x);
5335 if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
5336 return emit_barrier ();
5337 return insn;
5339 case CALL_INSN:
5340 return emit_call_insn (x);
5341 case DEBUG_INSN:
5342 return emit_debug_insn (x);
5343 default:
5344 gcc_unreachable ();
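/* A minimal sketch of how emit dispatches on the pattern: a SET whose
   destination is pc_rtx is classified as a JUMP_INSN (and may be followed
   by a barrier), while any other SET becomes a plain INSN.  The names
   example_emit_set, DEST and SRC are hypothetical.  */

static rtx_insn *
example_emit_set (rtx dest, rtx src)
{
  return emit (gen_rtx_SET (VOIDmode, dest, src));
}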
5348 /* Space for free sequence stack entries. */
5349 static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
5351 /* Begin emitting insns to a sequence. If this sequence will contain
5352 something that might cause the compiler to pop arguments to function
5353 calls (because those pops have previously been deferred; see
5354 INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
5355 before calling this function. That will ensure that the deferred
5356 pops are not accidentally emitted in the middle of this sequence. */
5358 void
5359 start_sequence (void)
5361 struct sequence_stack *tem;
5363 if (free_sequence_stack != NULL)
5365 tem = free_sequence_stack;
5366 free_sequence_stack = tem->next;
5368 else
5369 tem = ggc_alloc<sequence_stack> ();
5371 tem->next = seq_stack;
5372 tem->first = get_insns ();
5373 tem->last = get_last_insn ();
5375 seq_stack = tem;
5377 set_first_insn (0);
5378 set_last_insn (0);
5381 /* Set up the insn chain starting with FIRST as the current sequence,
5382 saving the previously current one. See the documentation for
5383 start_sequence for more information about how to use this function. */
5385 void
5386 push_to_sequence (rtx_insn *first)
5388 rtx_insn *last;
5390 start_sequence ();
5392 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last))
5395 set_first_insn (first);
5396 set_last_insn (last);
5399 /* Like push_to_sequence, but take the last insn as an argument to avoid
5400 looping through the list. */
5402 void
5403 push_to_sequence2 (rtx_insn *first, rtx_insn *last)
5405 start_sequence ();
5407 set_first_insn (first);
5408 set_last_insn (last);
5411 /* Set up the outer-level insn chain
5412 as the current sequence, saving the previously current one. */
5414 void
5415 push_topmost_sequence (void)
5417 struct sequence_stack *stack, *top = NULL;
5419 start_sequence ();
5421 for (stack = seq_stack; stack; stack = stack->next)
5422 top = stack;
5424 set_first_insn (top->first);
5425 set_last_insn (top->last);
5428 /* After emitting to the outer-level insn chain, update the outer-level
5429 insn chain, and restore the previous saved state. */
5431 void
5432 pop_topmost_sequence (void)
5434 struct sequence_stack *stack, *top = NULL;
5436 for (stack = seq_stack; stack; stack = stack->next)
5437 top = stack;
5439 top->first = get_insns ();
5440 top->last = get_last_insn ();
5442 end_sequence ();
5445 /* After emitting to a sequence, restore previous saved state.
5447 To get the contents of the sequence just made, you must call
5448 `get_insns' *before* calling here.
5450 If the compiler might have deferred popping arguments while
5451 generating this sequence, and this sequence will not be immediately
5452 inserted into the instruction stream, use do_pending_stack_adjust
5453 before calling get_insns. That will ensure that the deferred
5454 pops are inserted into this sequence, and not into some random
5455 location in the instruction stream. See INHIBIT_DEFER_POP for more
5456 information about deferred popping of arguments. */
5458 void
5459 end_sequence (void)
5461 struct sequence_stack *tem = seq_stack;
5463 set_first_insn (tem->first);
5464 set_last_insn (tem->last);
5465 seq_stack = tem->next;
5467 memset (tem, 0, sizeof (*tem));
5468 tem->next = free_sequence_stack;
5469 free_sequence_stack = tem;
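/* A minimal sketch of the outer-level discipline: temporarily switch to
   the topmost insn chain, emit PATTERN right after the function's first
   insn, then restore whatever nested sequence was active.  The name
   example_emit_at_function_start is hypothetical.  */

static void
example_emit_at_function_start (rtx pattern)
{
  push_topmost_sequence ();
  emit_insn_after (pattern, get_insns ());
  pop_topmost_sequence ();
}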
5472 /* Return 1 if currently emitting into a sequence. */
5474 int
5475 in_sequence_p (void)
5477 return seq_stack != 0;
5480 /* Put the various virtual registers into REGNO_REG_RTX. */
5482 static void
5483 init_virtual_regs (void)
5485 regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
5486 regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
5487 regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
5488 regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
5489 regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
5490 regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
5491 = virtual_preferred_stack_boundary_rtx;
5495 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
5496 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
5497 static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
5498 static int copy_insn_n_scratches;
5500 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5501 copied an ASM_OPERANDS.
5502 In that case, it is the original input-operand vector. */
5503 static rtvec orig_asm_operands_vector;
5505 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5506 copied an ASM_OPERANDS.
5507 In that case, it is the copied input-operand vector. */
5508 static rtvec copy_asm_operands_vector;
5510 /* Likewise for the constraints vector. */
5511 static rtvec orig_asm_constraints_vector;
5512 static rtvec copy_asm_constraints_vector;
5514 /* Recursively create a new copy of an rtx for copy_insn.
5515 This function differs from copy_rtx in that it handles SCRATCHes and
5516 ASM_OPERANDs properly.
5517 Normally, this function is not used directly; use copy_insn as front end.
5518 However, you could first copy an insn pattern with copy_insn and then use
5519 this function afterwards to properly copy any REG_NOTEs containing
5520 SCRATCHes. */
5522 rtx
5523 copy_insn_1 (rtx orig)
5525 rtx copy;
5526 int i, j;
5527 RTX_CODE code;
5528 const char *format_ptr;
5530 if (orig == NULL)
5531 return NULL;
5533 code = GET_CODE (orig);
5535 switch (code)
5537 case REG:
5538 case DEBUG_EXPR:
5539 CASE_CONST_ANY:
5540 case SYMBOL_REF:
5541 case CODE_LABEL:
5542 case PC:
5543 case CC0:
5544 case RETURN:
5545 case SIMPLE_RETURN:
5546 return orig;
5547 case CLOBBER:
5548 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
5549 clobbers or clobbers of hard registers that originated as pseudos.
5550 This is needed to allow safe register renaming. */
5551 if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER
5552 && ORIGINAL_REGNO (XEXP (orig, 0)) == REGNO (XEXP (orig, 0)))
5553 return orig;
5554 break;
5556 case SCRATCH:
5557 for (i = 0; i < copy_insn_n_scratches; i++)
5558 if (copy_insn_scratch_in[i] == orig)
5559 return copy_insn_scratch_out[i];
5560 break;
5562 case CONST:
5563 if (shared_const_p (orig))
5564 return orig;
5565 break;
5567 /* A MEM with a constant address is not sharable. The problem is that
5568 the constant address may need to be reloaded. If the mem is shared,
5569 then reloading one copy of this mem will cause all copies to appear
5570 to have been reloaded. */
5572 default:
5573 break;
5576 /* Copy the various flags, fields, and other information. We assume
5577 that all fields need copying, and then clear the fields that should
5578 not be copied. That is the sensible default behavior, and forces
5579 us to explicitly document why we are *not* copying a flag. */
5580 copy = shallow_copy_rtx (orig);
5582 /* We do not copy the USED flag, which is used as a mark bit during
5583 walks over the RTL. */
5584 RTX_FLAG (copy, used) = 0;
5586 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
5587 if (INSN_P (orig))
5589 RTX_FLAG (copy, jump) = 0;
5590 RTX_FLAG (copy, call) = 0;
5591 RTX_FLAG (copy, frame_related) = 0;
5594 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
5596 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
5597 switch (*format_ptr++)
5599 case 'e':
5600 if (XEXP (orig, i) != NULL)
5601 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
5602 break;
5604 case 'E':
5605 case 'V':
5606 if (XVEC (orig, i) == orig_asm_constraints_vector)
5607 XVEC (copy, i) = copy_asm_constraints_vector;
5608 else if (XVEC (orig, i) == orig_asm_operands_vector)
5609 XVEC (copy, i) = copy_asm_operands_vector;
5610 else if (XVEC (orig, i) != NULL)
5612 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
5613 for (j = 0; j < XVECLEN (copy, i); j++)
5614 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
5616 break;
5618 case 't':
5619 case 'w':
5620 case 'i':
5621 case 's':
5622 case 'S':
5623 case 'u':
5624 case '0':
5625 /* These are left unchanged. */
5626 break;
5628 default:
5629 gcc_unreachable ();
5632 if (code == SCRATCH)
5634 i = copy_insn_n_scratches++;
5635 gcc_assert (i < MAX_RECOG_OPERANDS);
5636 copy_insn_scratch_in[i] = orig;
5637 copy_insn_scratch_out[i] = copy;
5639 else if (code == ASM_OPERANDS)
5641 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
5642 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
5643 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
5644 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
5647 return copy;
5650 /* Create a new copy of an rtx.
5651 This function differs from copy_rtx in that it handles SCRATCHes and
5652 ASM_OPERANDs properly.
5653 INSN doesn't really have to be a full INSN; it could be just the
5654 pattern. */
5655 rtx
5656 copy_insn (rtx insn)
5658 copy_insn_n_scratches = 0;
5659 orig_asm_operands_vector = 0;
5660 orig_asm_constraints_vector = 0;
5661 copy_asm_operands_vector = 0;
5662 copy_asm_constraints_vector = 0;
5663 return copy_insn_1 (insn);
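/* A minimal sketch: duplicate the pattern of INSN with copy_insn rather
   than copy_rtx, so SCRATCHes and ASM_OPERANDS are copied consistently,
   and emit the copy after AFTER.  example_duplicate_pattern_after is a
   hypothetical name.  */

static rtx_insn *
example_duplicate_pattern_after (rtx_insn *insn, rtx_insn *after)
{
  return emit_insn_after (copy_insn (PATTERN (insn)), after);
}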
5666 /* Return a copy of INSN that can be used in a SEQUENCE delay slot,
5667 on the assumption that INSN itself remains in its original place. */
5669 rtx_insn *
5670 copy_delay_slot_insn (rtx_insn *insn)
5672 /* Copy INSN with its rtx_code, all its notes, location etc. */
5673 insn = as_a <rtx_insn *> (copy_rtx (insn));
5674 INSN_UID (insn) = cur_insn_uid++;
5675 return insn;
5678 /* Initialize data structures and variables in this file
5679 before generating rtl for each function. */
5681 void
5682 init_emit (void)
5684 set_first_insn (NULL);
5685 set_last_insn (NULL);
5686 if (MIN_NONDEBUG_INSN_UID)
5687 cur_insn_uid = MIN_NONDEBUG_INSN_UID;
5688 else
5689 cur_insn_uid = 1;
5690 cur_debug_insn_uid = 1;
5691 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
5692 first_label_num = label_num;
5693 seq_stack = NULL;
5695 /* Init the tables that describe all the pseudo regs. */
5697 crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
5699 crtl->emit.regno_pointer_align
5700 = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);
5702 regno_reg_rtx = ggc_vec_alloc<rtx> (crtl->emit.regno_pointer_align_length);
5704 /* Put copies of all the hard registers into regno_reg_rtx. */
5705 memcpy (regno_reg_rtx,
5706 initial_regno_reg_rtx,
5707 FIRST_PSEUDO_REGISTER * sizeof (rtx));
5709 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
5710 init_virtual_regs ();
5712 /* Indicate that the virtual registers and stack locations are
5713 all pointers. */
5714 REG_POINTER (stack_pointer_rtx) = 1;
5715 REG_POINTER (frame_pointer_rtx) = 1;
5716 REG_POINTER (hard_frame_pointer_rtx) = 1;
5717 REG_POINTER (arg_pointer_rtx) = 1;
5719 REG_POINTER (virtual_incoming_args_rtx) = 1;
5720 REG_POINTER (virtual_stack_vars_rtx) = 1;
5721 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5722 REG_POINTER (virtual_outgoing_args_rtx) = 1;
5723 REG_POINTER (virtual_cfa_rtx) = 1;
5725 #ifdef STACK_BOUNDARY
5726 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5727 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5728 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5729 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5731 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5732 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5733 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5734 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5735 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
5736 #endif
5738 #ifdef INIT_EXPANDERS
5739 INIT_EXPANDERS;
5740 #endif
5743 /* Generate a vector constant for mode MODE and constant value CONSTANT. */
5745 static rtx
5746 gen_const_vector (enum machine_mode mode, int constant)
5748 rtx tem;
5749 rtvec v;
5750 int units, i;
5751 enum machine_mode inner;
5753 units = GET_MODE_NUNITS (mode);
5754 inner = GET_MODE_INNER (mode);
5756 gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));
5758 v = rtvec_alloc (units);
5760 /* We need to call this function after we set the scalar const_tiny_rtx
5761 entries. */
5762 gcc_assert (const_tiny_rtx[constant][(int) inner]);
5764 for (i = 0; i < units; ++i)
5765 RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];
5767 tem = gen_rtx_raw_CONST_VECTOR (mode, v);
5768 return tem;
5771 /* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the shared zero,
5772 one or minus-one vector when all elements have that same value. */
5773 rtx
5774 gen_rtx_CONST_VECTOR (enum machine_mode mode, rtvec v)
5776 enum machine_mode inner = GET_MODE_INNER (mode);
5777 int nunits = GET_MODE_NUNITS (mode);
5778 rtx x;
5779 int i;
5781 /* Check to see if all of the elements have the same value. */
5782 x = RTVEC_ELT (v, nunits - 1);
5783 for (i = nunits - 2; i >= 0; i--)
5784 if (RTVEC_ELT (v, i) != x)
5785 break;
5787 /* If the values are all the same, check to see if we can use one of the
5788 standard constant vectors. */
5789 if (i == -1)
5791 if (x == CONST0_RTX (inner))
5792 return CONST0_RTX (mode);
5793 else if (x == CONST1_RTX (inner))
5794 return CONST1_RTX (mode);
5795 else if (x == CONSTM1_RTX (inner))
5796 return CONSTM1_RTX (mode);
5799 return gen_rtx_raw_CONST_VECTOR (mode, v);
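/* A minimal sketch, assuming MODE is a vector mode whose tiny constants
   have already been initialized: build an all-zero constant vector.
   Because every element is CONST0_RTX of the inner mode,
   gen_rtx_CONST_VECTOR hands back the shared CONST0_RTX (MODE) instead of
   allocating a new vector.  example_zero_vector is a hypothetical name.  */

static rtx
example_zero_vector (enum machine_mode mode)
{
  int i, n = GET_MODE_NUNITS (mode);
  rtvec v = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    RTVEC_ELT (v, i) = CONST0_RTX (GET_MODE_INNER (mode));

  return gen_rtx_CONST_VECTOR (mode, v);
}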
5802 /* Initialize global register information required by all functions. */
5804 void
5805 init_emit_regs (void)
5807 int i;
5808 enum machine_mode mode;
5809 mem_attrs *attrs;
5811 /* Reset register attributes */
5812 htab_empty (reg_attrs_htab);
5814 /* We need reg_raw_mode, so initialize the modes now. */
5815 init_reg_modes_target ();
5817 /* Assign register numbers to the globally defined register rtx. */
5818 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
5819 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
5820 hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
5821 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
5822 virtual_incoming_args_rtx =
5823 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
5824 virtual_stack_vars_rtx =
5825 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
5826 virtual_stack_dynamic_rtx =
5827 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
5828 virtual_outgoing_args_rtx =
5829 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
5830 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
5831 virtual_preferred_stack_boundary_rtx =
5832 gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);
5834 /* Initialize RTL for commonly used hard registers. These are
5835 copied into regno_reg_rtx as we begin to compile each function. */
5836 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5837 initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
5839 #ifdef RETURN_ADDRESS_POINTER_REGNUM
5840 return_address_pointer_rtx
5841 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
5842 #endif
5844 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
5845 pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
5846 else
5847 pic_offset_table_rtx = NULL_RTX;
5849 for (i = 0; i < (int) MAX_MACHINE_MODE; i++)
5851 mode = (enum machine_mode) i;
5852 attrs = ggc_cleared_alloc<mem_attrs> ();
5853 attrs->align = BITS_PER_UNIT;
5854 attrs->addrspace = ADDR_SPACE_GENERIC;
5855 if (mode != BLKmode)
5857 attrs->size_known_p = true;
5858 attrs->size = GET_MODE_SIZE (mode);
5859 if (STRICT_ALIGNMENT)
5860 attrs->align = GET_MODE_ALIGNMENT (mode);
5862 mode_mem_attrs[i] = attrs;
5866 /* Initialize global machine_mode variables. */
5868 void
5869 init_derived_machine_modes (void)
5871 byte_mode = VOIDmode;
5872 word_mode = VOIDmode;
5874 for (enum machine_mode mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5875 mode != VOIDmode;
5876 mode = GET_MODE_WIDER_MODE (mode))
5878 if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
5879 && byte_mode == VOIDmode)
5880 byte_mode = mode;
5882 if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
5883 && word_mode == VOIDmode)
5884 word_mode = mode;
5887 ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
5890 /* Create some permanent unique rtl objects shared between all functions. */
5892 void
5893 init_emit_once (void)
5895 int i;
5896 enum machine_mode mode;
5897 enum machine_mode double_mode;
5899 /* Initialize the CONST_INT, CONST_WIDE_INT, CONST_DOUBLE,
5900 CONST_FIXED, and memory attribute hash tables. */
5901 const_int_htab = htab_create_ggc (37, const_int_htab_hash,
5902 const_int_htab_eq, NULL);
5904 #if TARGET_SUPPORTS_WIDE_INT
5905 const_wide_int_htab = htab_create_ggc (37, const_wide_int_htab_hash,
5906 const_wide_int_htab_eq, NULL);
5907 #endif
5908 const_double_htab = htab_create_ggc (37, const_double_htab_hash,
5909 const_double_htab_eq, NULL);
5911 const_fixed_htab = htab_create_ggc (37, const_fixed_htab_hash,
5912 const_fixed_htab_eq, NULL);
5914 reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash,
5915 reg_attrs_htab_eq, NULL);
5917 #ifdef INIT_EXPANDERS
5918 /* This is to initialize {init|mark|free}_machine_status before the first
5919 call to push_function_context_to. This is needed by the Chill front
5920 end which calls push_function_context_to before the first call to
5921 init_function_start. */
5922 INIT_EXPANDERS;
5923 #endif
5925 /* Create the unique rtx's for certain rtx codes and operand values. */
5927 /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
5928 tries to use these variables. */
5929 for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
5930 const_int_rtx[i + MAX_SAVED_CONST_INT] =
5931 gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);
5933 if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
5934 && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
5935 const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
5936 else
5937 const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
5939 double_mode = mode_for_size (DOUBLE_TYPE_SIZE, MODE_FLOAT, 0);
5941 real_from_integer (&dconst0, double_mode, 0, SIGNED);
5942 real_from_integer (&dconst1, double_mode, 1, SIGNED);
5943 real_from_integer (&dconst2, double_mode, 2, SIGNED);
5945 dconstm1 = dconst1;
5946 dconstm1.sign = 1;
5948 dconsthalf = dconst1;
5949 SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);
5951 for (i = 0; i < 3; i++)
5953 const REAL_VALUE_TYPE *const r =
5954 (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
5956 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
5957 mode != VOIDmode;
5958 mode = GET_MODE_WIDER_MODE (mode))
5959 const_tiny_rtx[i][(int) mode] =
5960 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5962 for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT);
5963 mode != VOIDmode;
5964 mode = GET_MODE_WIDER_MODE (mode))
5965 const_tiny_rtx[i][(int) mode] =
5966 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5968 const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
5970 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5971 mode != VOIDmode;
5972 mode = GET_MODE_WIDER_MODE (mode))
5973 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5975 for (mode = MIN_MODE_PARTIAL_INT;
5976 mode <= MAX_MODE_PARTIAL_INT;
5977 mode = (enum machine_mode)((int)(mode) + 1))
5978 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5981 const_tiny_rtx[3][(int) VOIDmode] = constm1_rtx;
5983 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5984 mode != VOIDmode;
5985 mode = GET_MODE_WIDER_MODE (mode))
5986 const_tiny_rtx[3][(int) mode] = constm1_rtx;
5988 for (mode = MIN_MODE_PARTIAL_INT;
5989 mode <= MAX_MODE_PARTIAL_INT;
5990 mode = (enum machine_mode)((int)(mode) + 1))
5991 const_tiny_rtx[3][(int) mode] = constm1_rtx;
5993 for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_INT);
5994 mode != VOIDmode;
5995 mode = GET_MODE_WIDER_MODE (mode))
5997 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
5998 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
6001 for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_FLOAT);
6002 mode != VOIDmode;
6003 mode = GET_MODE_WIDER_MODE (mode))
6005 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
6006 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
6009 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
6010 mode != VOIDmode;
6011 mode = GET_MODE_WIDER_MODE (mode))
6013 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6014 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6015 const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
6018 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
6019 mode != VOIDmode;
6020 mode = GET_MODE_WIDER_MODE (mode))
6022 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6023 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6026 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FRACT);
6027 mode != VOIDmode;
6028 mode = GET_MODE_WIDER_MODE (mode))
6030 FCONST0 (mode).data.high = 0;
6031 FCONST0 (mode).data.low = 0;
6032 FCONST0 (mode).mode = mode;
6033 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6034 FCONST0 (mode), mode);
6037 for (mode = GET_CLASS_NARROWEST_MODE (MODE_UFRACT);
6038 mode != VOIDmode;
6039 mode = GET_MODE_WIDER_MODE (mode))
6041 FCONST0 (mode).data.high = 0;
6042 FCONST0 (mode).data.low = 0;
6043 FCONST0 (mode).mode = mode;
6044 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6045 FCONST0 (mode), mode);
6048 for (mode = GET_CLASS_NARROWEST_MODE (MODE_ACCUM);
6049 mode != VOIDmode;
6050 mode = GET_MODE_WIDER_MODE (mode))
6052 FCONST0 (mode).data.high = 0;
6053 FCONST0 (mode).data.low = 0;
6054 FCONST0 (mode).mode = mode;
6055 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6056 FCONST0 (mode), mode);
6058 /* We store the value 1. */
6059 FCONST1 (mode).data.high = 0;
6060 FCONST1 (mode).data.low = 0;
6061 FCONST1 (mode).mode = mode;
6062 FCONST1 (mode).data
6063 = double_int_one.lshift (GET_MODE_FBIT (mode),
6064 HOST_BITS_PER_DOUBLE_INT,
6065 SIGNED_FIXED_POINT_MODE_P (mode));
6066 const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6067 FCONST1 (mode), mode);
6070 for (mode = GET_CLASS_NARROWEST_MODE (MODE_UACCUM);
6071 mode != VOIDmode;
6072 mode = GET_MODE_WIDER_MODE (mode))
6074 FCONST0 (mode).data.high = 0;
6075 FCONST0 (mode).data.low = 0;
6076 FCONST0 (mode).mode = mode;
6077 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6078 FCONST0 (mode), mode);
6080 /* We store the value 1. */
6081 FCONST1 (mode).data.high = 0;
6082 FCONST1 (mode).data.low = 0;
6083 FCONST1 (mode).mode = mode;
6084 FCONST1 (mode).data
6085 = double_int_one.lshift (GET_MODE_FBIT (mode),
6086 HOST_BITS_PER_DOUBLE_INT,
6087 SIGNED_FIXED_POINT_MODE_P (mode));
6088 const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6089 FCONST1 (mode), mode);
6092 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FRACT);
6093 mode != VOIDmode;
6094 mode = GET_MODE_WIDER_MODE (mode))
6096 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6099 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UFRACT);
6100 mode != VOIDmode;
6101 mode = GET_MODE_WIDER_MODE (mode))
6103 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6106 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_ACCUM);
6107 mode != VOIDmode;
6108 mode = GET_MODE_WIDER_MODE (mode))
6110 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6111 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6114 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UACCUM);
6115 mode != VOIDmode;
6116 mode = GET_MODE_WIDER_MODE (mode))
6118 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6119 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6122 for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
6123 if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
6124 const_tiny_rtx[0][i] = const0_rtx;
6126 const_tiny_rtx[0][(int) BImode] = const0_rtx;
6127 if (STORE_FLAG_VALUE == 1)
6128 const_tiny_rtx[1][(int) BImode] = const1_rtx;
6130 pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
6131 ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
6132 simple_return_rtx = gen_rtx_fmt_ (SIMPLE_RETURN, VOIDmode);
6133 cc0_rtx = gen_rtx_fmt_ (CC0, VOIDmode);
6136 /* Produce an exact duplicate of insn INSN after AFTER.
6137 Take care to update libcall regions if present. */
6139 rtx_insn *
6140 emit_copy_of_insn_after (rtx_insn *insn, rtx_insn *after)
6142 rtx_insn *new_rtx;
6143 rtx link;
6145 switch (GET_CODE (insn))
6147 case INSN:
6148 new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
6149 break;
6151 case JUMP_INSN:
6152 new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
6153 CROSSING_JUMP_P (new_rtx) = CROSSING_JUMP_P (insn);
6154 break;
6156 case DEBUG_INSN:
6157 new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
6158 break;
6160 case CALL_INSN:
6161 new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
6162 if (CALL_INSN_FUNCTION_USAGE (insn))
6163 CALL_INSN_FUNCTION_USAGE (new_rtx)
6164 = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
6165 SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
6166 RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
6167 RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
6168 RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
6169 = RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
6170 break;
6172 default:
6173 gcc_unreachable ();
6176 /* Update LABEL_NUSES. */
6177 mark_jump_label (PATTERN (new_rtx), new_rtx, 0);
6179 INSN_LOCATION (new_rtx) = INSN_LOCATION (insn);
6181 /* If the old insn is frame related, then so is the new one. This is
6182 primarily needed for IA-64 unwind info which marks epilogue insns,
6183 which may be duplicated by the basic block reordering code. */
6184 RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);
6186 /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
6187 will make them. REG_LABEL_TARGETs are created there too, but are
6188 supposed to be sticky, so we copy them. */
6189 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
6190 if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
6192 if (GET_CODE (link) == EXPR_LIST)
6193 add_reg_note (new_rtx, REG_NOTE_KIND (link),
6194 copy_insn_1 (XEXP (link, 0)));
6195 else
6196 add_shallow_copy_of_reg_note (new_rtx, link);
6199 INSN_CODE (new_rtx) = INSN_CODE (insn);
6200 return new_rtx;
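/* A minimal sketch: duplicate the insns FROM..TO (inclusive), with their
   notes and locations, immediately after AFTER, advancing AFTER as each
   copy is emitted.  example_duplicate_range_after is a hypothetical
   name.  */

static void
example_duplicate_range_after (rtx_insn *from, rtx_insn *to, rtx_insn *after)
{
  rtx_insn *insn = from;

  while (1)
    {
      after = emit_copy_of_insn_after (insn, after);
      if (insn == to)
        break;
      insn = NEXT_INSN (insn);
    }
}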
6203 static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
6204 rtx
6205 gen_hard_reg_clobber (enum machine_mode mode, unsigned int regno)
6207 if (hard_reg_clobbers[mode][regno])
6208 return hard_reg_clobbers[mode][regno];
6209 else
6210 return (hard_reg_clobbers[mode][regno] =
6211 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
6214 location_t prologue_location;
6215 location_t epilogue_location;
6217 /* Hold the current location information and the last location information,
6218 so that the datastructures are built lazily, only when instructions at a
6219 given place are actually needed. */
6220 static location_t curr_location;
6222 /* Allocate insn location datastructure. */
6223 void
6224 insn_locations_init (void)
6226 prologue_location = epilogue_location = 0;
6227 curr_location = UNKNOWN_LOCATION;
6230 /* At the end of emit stage, clear current location. */
6231 void
6232 insn_locations_finalize (void)
6234 epilogue_location = curr_location;
6235 curr_location = UNKNOWN_LOCATION;
6238 /* Set current location. */
6239 void
6240 set_curr_insn_location (location_t location)
6242 curr_location = location;
6245 /* Get current location. */
6246 location_t
6247 curr_insn_location (void)
6249 return curr_location;
6252 /* Return the lexical scope block that INSN belongs to. */
6253 tree
6254 insn_scope (const rtx_insn *insn)
6256 return LOCATION_BLOCK (INSN_LOCATION (insn));
6259 /* Return line number of the statement that produced this insn. */
6260 int
6261 insn_line (const rtx_insn *insn)
6263 return LOCATION_LINE (INSN_LOCATION (insn));
6266 /* Return source file of the statement that produced this insn. */
6267 const char *
6268 insn_file (const rtx_insn *insn)
6270 return LOCATION_FILE (INSN_LOCATION (insn));
6273 /* Return expanded location of the statement that produced this insn. */
6274 expanded_location
6275 insn_location (const rtx_insn *insn)
6277 return expand_location (INSN_LOCATION (insn));
6280 /* Return true if memory model MODEL requires a pre-operation (release-style)
6281 barrier or a post-operation (acquire-style) barrier. While not universal,
6282 this function matches the behavior of several targets. */
6284 bool
6285 need_atomic_barrier_p (enum memmodel model, bool pre)
6287 switch (model & MEMMODEL_MASK)
6289 case MEMMODEL_RELAXED:
6290 case MEMMODEL_CONSUME:
6291 return false;
6292 case MEMMODEL_RELEASE:
6293 return pre;
6294 case MEMMODEL_ACQUIRE:
6295 return !pre;
6296 case MEMMODEL_ACQ_REL:
6297 case MEMMODEL_SEQ_CST:
6298 return true;
6299 default:
6300 gcc_unreachable ();
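/* A minimal sketch of how a target expander might consult
   need_atomic_barrier_p when emitting an atomic store.  The barrier
   generator gen_example_memory_barrier is hypothetical and target
   specific, and the plain SET merely stands in for the real store
   pattern.  */

static void
example_expand_atomic_store (rtx mem, rtx val, enum memmodel model)
{
  if (need_atomic_barrier_p (model, true))
    emit_insn (gen_example_memory_barrier ());  /* hypothetical */

  emit_insn (gen_rtx_SET (VOIDmode, mem, val));

  if (need_atomic_barrier_p (model, false))
    emit_insn (gen_example_memory_barrier ());  /* hypothetical */
}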
6304 #include "gt-emit-rtl.h"