gcc/emit-rtl.c
1 /* Emit RTL for the GCC expander.
2 Copyright (C) 1987-2014 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
21 /* Middle-to-low level generation of rtx code and insns.
23 This file contains support functions for creating rtl expressions
24 and manipulating them in the doubly-linked chain of insns.
26 The patterns of the insns are created by machine-dependent
27 routines in insn-emit.c, which is generated automatically from
28 the machine description. These routines make the individual rtx's
29 of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
30 which are automatically generated from rtl.def; what is machine
31 dependent is the kind of rtx's they make and what arguments they
32 use. */
34 #include "config.h"
35 #include "system.h"
36 #include "coretypes.h"
37 #include "tm.h"
38 #include "diagnostic-core.h"
39 #include "rtl.h"
40 #include "tree.h"
41 #include "varasm.h"
42 #include "predict.h"
43 #include "vec.h"
44 #include "hashtab.h"
45 #include "hash-set.h"
46 #include "machmode.h"
47 #include "hard-reg-set.h"
48 #include "input.h"
49 #include "function.h"
50 #include "cfgrtl.h"
51 #include "basic-block.h"
52 #include "tree-eh.h"
53 #include "tm_p.h"
54 #include "flags.h"
55 #include "stringpool.h"
56 #include "expr.h"
57 #include "regs.h"
58 #include "insn-config.h"
59 #include "recog.h"
60 #include "bitmap.h"
61 #include "debug.h"
62 #include "langhooks.h"
63 #include "df.h"
64 #include "params.h"
65 #include "target.h"
66 #include "builtins.h"
67 #include "rtl-iter.h"
69 struct target_rtl default_target_rtl;
70 #if SWITCHABLE_TARGET
71 struct target_rtl *this_target_rtl = &default_target_rtl;
72 #endif
74 #define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)
76 /* Commonly used modes. */
78 machine_mode byte_mode; /* Mode whose width is BITS_PER_UNIT. */
79 machine_mode word_mode; /* Mode whose width is BITS_PER_WORD. */
80 machine_mode double_mode; /* Mode whose width is DOUBLE_TYPE_SIZE. */
81 machine_mode ptr_mode; /* Mode whose width is POINTER_SIZE. */
83 /* Datastructures maintained for currently processed function in RTL form. */
85 struct rtl_data x_rtl;
87 /* Indexed by pseudo register number, gives the rtx for that pseudo.
88 Allocated in parallel with regno_pointer_align.
 89 FIXME: We could put it into the emit_status struct, but gengtype cannot deal
 90 with a length attribute nested inside top-level structures. */
92 rtx * regno_reg_rtx;
94 /* This is *not* reset after each function. It gives each CODE_LABEL
95 in the entire compilation a unique label number. */
97 static GTY(()) int label_num = 1;
99 /* We record floating-point CONST_DOUBLEs in each floating-point mode for
100 the values of 0, 1, and 2. For the integer entries and VOIDmode, we
101 record a copy of const[012]_rtx and constm1_rtx. CONSTM1_RTX
102 is set only for MODE_INT and MODE_VECTOR_INT modes. */
104 rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE];
106 rtx const_true_rtx;
108 REAL_VALUE_TYPE dconst0;
109 REAL_VALUE_TYPE dconst1;
110 REAL_VALUE_TYPE dconst2;
111 REAL_VALUE_TYPE dconstm1;
112 REAL_VALUE_TYPE dconsthalf;
114 /* Record fixed-point constant 0 and 1. */
115 FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
116 FIXED_VALUE_TYPE fconst1[MAX_FCONST1];
118 /* We make one copy of (const_int C) where C is in
119 [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
120 to save space during the compilation and simplify comparisons of
121 integers. */
123 rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
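/* Illustrative sketch, added for exposition and not part of the original
   file: because of the shared table above, small CONST_INTs are unique
   objects and can be compared by pointer.  Only names from rtl.h are used;
   the example function is hypothetical and kept under #if 0.  */
#if 0
static void
example_shared_const_int (void)
{
  /* GEN_INT (2) returns the cached const_int_rtx[MAX_SAVED_CONST_INT + 2],
     which is the same object the const2_rtx macro expands to.  */
  gcc_assert (GEN_INT (2) == const2_rtx);
}
#endif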
125 /* Standard pieces of rtx, to be substituted directly into things. */
126 rtx pc_rtx;
127 rtx ret_rtx;
128 rtx simple_return_rtx;
129 rtx cc0_rtx;
131 /* A hash table storing CONST_INTs whose absolute value is greater
132 than MAX_SAVED_CONST_INT. */
134 static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
135 htab_t const_int_htab;
137 static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
138 htab_t const_wide_int_htab;
140 /* A hash table storing register attribute structures. */
141 static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs)))
142 htab_t reg_attrs_htab;
144 /* A hash table storing all CONST_DOUBLEs. */
145 static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
146 htab_t const_double_htab;
148 /* A hash table storing all CONST_FIXEDs. */
149 static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
150 htab_t const_fixed_htab;
152 #define cur_insn_uid (crtl->emit.x_cur_insn_uid)
153 #define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
154 #define first_label_num (crtl->emit.x_first_label_num)
156 static void set_used_decls (tree);
157 static void mark_label_nuses (rtx);
158 static hashval_t const_int_htab_hash (const void *);
159 static int const_int_htab_eq (const void *, const void *);
160 #if TARGET_SUPPORTS_WIDE_INT
161 static hashval_t const_wide_int_htab_hash (const void *);
162 static int const_wide_int_htab_eq (const void *, const void *);
163 static rtx lookup_const_wide_int (rtx);
164 #endif
165 static hashval_t const_double_htab_hash (const void *);
166 static int const_double_htab_eq (const void *, const void *);
167 static rtx lookup_const_double (rtx);
168 static hashval_t const_fixed_htab_hash (const void *);
169 static int const_fixed_htab_eq (const void *, const void *);
170 static rtx lookup_const_fixed (rtx);
171 static hashval_t reg_attrs_htab_hash (const void *);
172 static int reg_attrs_htab_eq (const void *, const void *);
173 static reg_attrs *get_reg_attrs (tree, int);
174 static rtx gen_const_vector (machine_mode, int);
175 static void copy_rtx_if_shared_1 (rtx *orig);
 177 /* Probability of the conditional branch currently processed by try_split.
 178 Set to -1 otherwise. */
179 int split_branch_probability = -1;
 181 /* Returns a hash code for X (which is really a CONST_INT). */
183 static hashval_t
184 const_int_htab_hash (const void *x)
186 return (hashval_t) INTVAL ((const_rtx) x);
189 /* Returns nonzero if the value represented by X (which is really a
190 CONST_INT) is the same as that given by Y (which is really a
191 HOST_WIDE_INT *). */
193 static int
194 const_int_htab_eq (const void *x, const void *y)
196 return (INTVAL ((const_rtx) x) == *((const HOST_WIDE_INT *) y));
199 #if TARGET_SUPPORTS_WIDE_INT
 200 /* Returns a hash code for X (which is really a CONST_WIDE_INT). */
202 static hashval_t
203 const_wide_int_htab_hash (const void *x)
205 int i;
206 HOST_WIDE_INT hash = 0;
207 const_rtx xr = (const_rtx) x;
209 for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
210 hash += CONST_WIDE_INT_ELT (xr, i);
212 return (hashval_t) hash;
215 /* Returns nonzero if the value represented by X (which is really a
216 CONST_WIDE_INT) is the same as that given by Y (which is really a
217 CONST_WIDE_INT). */
219 static int
220 const_wide_int_htab_eq (const void *x, const void *y)
222 int i;
223 const_rtx xr = (const_rtx) x;
224 const_rtx yr = (const_rtx) y;
225 if (CONST_WIDE_INT_NUNITS (xr) != CONST_WIDE_INT_NUNITS (yr))
226 return 0;
228 for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
229 if (CONST_WIDE_INT_ELT (xr, i) != CONST_WIDE_INT_ELT (yr, i))
230 return 0;
232 return 1;
234 #endif
236 /* Returns a hash code for X (which is really a CONST_DOUBLE). */
237 static hashval_t
238 const_double_htab_hash (const void *x)
240 const_rtx const value = (const_rtx) x;
241 hashval_t h;
243 if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (value) == VOIDmode)
244 h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
245 else
247 h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
248 /* MODE is used in the comparison, so it should be in the hash. */
249 h ^= GET_MODE (value);
251 return h;
254 /* Returns nonzero if the value represented by X (really a ...)
255 is the same as that represented by Y (really a ...) */
256 static int
257 const_double_htab_eq (const void *x, const void *y)
259 const_rtx const a = (const_rtx)x, b = (const_rtx)y;
261 if (GET_MODE (a) != GET_MODE (b))
262 return 0;
263 if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (a) == VOIDmode)
264 return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
265 && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
266 else
267 return real_identical (CONST_DOUBLE_REAL_VALUE (a),
268 CONST_DOUBLE_REAL_VALUE (b));
271 /* Returns a hash code for X (which is really a CONST_FIXED). */
273 static hashval_t
274 const_fixed_htab_hash (const void *x)
276 const_rtx const value = (const_rtx) x;
277 hashval_t h;
279 h = fixed_hash (CONST_FIXED_VALUE (value));
280 /* MODE is used in the comparison, so it should be in the hash. */
281 h ^= GET_MODE (value);
282 return h;
285 /* Returns nonzero if the value represented by X (really a ...)
286 is the same as that represented by Y (really a ...). */
288 static int
289 const_fixed_htab_eq (const void *x, const void *y)
291 const_rtx const a = (const_rtx) x, b = (const_rtx) y;
293 if (GET_MODE (a) != GET_MODE (b))
294 return 0;
295 return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
298 /* Return true if the given memory attributes are equal. */
300 bool
301 mem_attrs_eq_p (const struct mem_attrs *p, const struct mem_attrs *q)
303 if (p == q)
304 return true;
305 if (!p || !q)
306 return false;
307 return (p->alias == q->alias
308 && p->offset_known_p == q->offset_known_p
309 && (!p->offset_known_p || p->offset == q->offset)
310 && p->size_known_p == q->size_known_p
311 && (!p->size_known_p || p->size == q->size)
312 && p->align == q->align
313 && p->addrspace == q->addrspace
314 && (p->expr == q->expr
315 || (p->expr != NULL_TREE && q->expr != NULL_TREE
316 && operand_equal_p (p->expr, q->expr, 0))));
319 /* Set MEM's memory attributes so that they are the same as ATTRS. */
321 static void
322 set_mem_attrs (rtx mem, mem_attrs *attrs)
324 /* If everything is the default, we can just clear the attributes. */
325 if (mem_attrs_eq_p (attrs, mode_mem_attrs[(int) GET_MODE (mem)]))
327 MEM_ATTRS (mem) = 0;
328 return;
331 if (!MEM_ATTRS (mem)
332 || !mem_attrs_eq_p (attrs, MEM_ATTRS (mem)))
334 MEM_ATTRS (mem) = ggc_alloc<mem_attrs> ();
335 memcpy (MEM_ATTRS (mem), attrs, sizeof (mem_attrs));
 339 /* Returns a hash code for X (which is really a reg_attrs *). */
341 static hashval_t
342 reg_attrs_htab_hash (const void *x)
344 const reg_attrs *const p = (const reg_attrs *) x;
346 return ((p->offset * 1000) ^ (intptr_t) p->decl);
349 /* Returns nonzero if the value represented by X (which is really a
350 reg_attrs *) is the same as that given by Y (which is also really a
351 reg_attrs *). */
353 static int
354 reg_attrs_htab_eq (const void *x, const void *y)
356 const reg_attrs *const p = (const reg_attrs *) x;
357 const reg_attrs *const q = (const reg_attrs *) y;
359 return (p->decl == q->decl && p->offset == q->offset);
 361 /* Allocate a new reg_attrs structure for DECL and OFFSET and insert it
 362 into the hash table if one identical to it is not already in the
 363 table. */
365 static reg_attrs *
366 get_reg_attrs (tree decl, int offset)
368 reg_attrs attrs;
369 void **slot;
371 /* If everything is the default, we can just return zero. */
372 if (decl == 0 && offset == 0)
373 return 0;
375 attrs.decl = decl;
376 attrs.offset = offset;
378 slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
379 if (*slot == 0)
381 *slot = ggc_alloc<reg_attrs> ();
382 memcpy (*slot, &attrs, sizeof (reg_attrs));
385 return (reg_attrs *) *slot;
389 #if !HAVE_blockage
390 /* Generate an empty ASM_INPUT, which is used to block attempts to schedule,
 391 and to block register equivalences from being seen across this insn. */
394 gen_blockage (void)
396 rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
397 MEM_VOLATILE_P (x) = true;
398 return x;
400 #endif
403 /* Generate a new REG rtx. Make sure ORIGINAL_REGNO is set properly, and
404 don't attempt to share with the various global pieces of rtl (such as
405 frame_pointer_rtx). */
408 gen_raw_REG (machine_mode mode, int regno)
410 rtx x = gen_rtx_raw_REG (mode, regno);
411 ORIGINAL_REGNO (x) = regno;
412 return x;
415 /* There are some RTL codes that require special attention; the generation
416 functions do the raw handling. If you add to this list, modify
417 special_rtx in gengenrtl.c as well. */
419 rtx_expr_list *
420 gen_rtx_EXPR_LIST (machine_mode mode, rtx expr, rtx expr_list)
422 return as_a <rtx_expr_list *> (gen_rtx_fmt_ee (EXPR_LIST, mode, expr,
423 expr_list));
426 rtx_insn_list *
427 gen_rtx_INSN_LIST (machine_mode mode, rtx insn, rtx insn_list)
429 return as_a <rtx_insn_list *> (gen_rtx_fmt_ue (INSN_LIST, mode, insn,
430 insn_list));
433 rtx_insn *
434 gen_rtx_INSN (machine_mode mode, rtx_insn *prev_insn, rtx_insn *next_insn,
435 basic_block bb, rtx pattern, int location, int code,
436 rtx reg_notes)
438 return as_a <rtx_insn *> (gen_rtx_fmt_uuBeiie (INSN, mode,
439 prev_insn, next_insn,
440 bb, pattern, location, code,
441 reg_notes));
445 gen_rtx_CONST_INT (machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
447 void **slot;
449 if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
450 return const_int_rtx[arg + MAX_SAVED_CONST_INT];
452 #if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
453 if (const_true_rtx && arg == STORE_FLAG_VALUE)
454 return const_true_rtx;
455 #endif
457 /* Look up the CONST_INT in the hash table. */
458 slot = htab_find_slot_with_hash (const_int_htab, &arg,
459 (hashval_t) arg, INSERT);
460 if (*slot == 0)
461 *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);
463 return (rtx) *slot;
467 gen_int_mode (HOST_WIDE_INT c, machine_mode mode)
469 return GEN_INT (trunc_int_for_mode (c, mode));
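/* Illustrative sketch, added for exposition and not part of the original
   file: gen_int_mode truncates C to the precision of MODE before looking
   up the shared CONST_INT, so out-of-range values fold onto the canonical
   constants.  Hypothetical example, kept under #if 0.  */
#if 0
static void
example_gen_int_mode (void)
{
  /* 255 does not fit 8-bit QImode as a signed value; trunc_int_for_mode
     sign-extends the low 8 bits, giving the shared -1.  */
  gcc_assert (gen_int_mode (255, QImode) == constm1_rtx);

  /* Values that already fit come back unchanged (and shared, since 42 is
     within MAX_SAVED_CONST_INT).  */
  gcc_assert (gen_int_mode (42, SImode) == GEN_INT (42));
}
#endif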
472 /* CONST_DOUBLEs might be created from pairs of integers, or from
473 REAL_VALUE_TYPEs. Also, their length is known only at run time,
474 so we cannot use gen_rtx_raw_CONST_DOUBLE. */
476 /* Determine whether REAL, a CONST_DOUBLE, already exists in the
477 hash table. If so, return its counterpart; otherwise add it
478 to the hash table and return it. */
479 static rtx
480 lookup_const_double (rtx real)
482 void **slot = htab_find_slot (const_double_htab, real, INSERT);
483 if (*slot == 0)
484 *slot = real;
486 return (rtx) *slot;
489 /* Return a CONST_DOUBLE rtx for a floating-point value specified by
490 VALUE in mode MODE. */
492 const_double_from_real_value (REAL_VALUE_TYPE value, machine_mode mode)
494 rtx real = rtx_alloc (CONST_DOUBLE);
495 PUT_MODE (real, mode);
497 real->u.rv = value;
499 return lookup_const_double (real);
502 /* Determine whether FIXED, a CONST_FIXED, already exists in the
503 hash table. If so, return its counterpart; otherwise add it
504 to the hash table and return it. */
506 static rtx
507 lookup_const_fixed (rtx fixed)
509 void **slot = htab_find_slot (const_fixed_htab, fixed, INSERT);
510 if (*slot == 0)
511 *slot = fixed;
513 return (rtx) *slot;
516 /* Return a CONST_FIXED rtx for a fixed-point value specified by
517 VALUE in mode MODE. */
520 const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, machine_mode mode)
522 rtx fixed = rtx_alloc (CONST_FIXED);
523 PUT_MODE (fixed, mode);
525 fixed->u.fv = value;
527 return lookup_const_fixed (fixed);
530 #if TARGET_SUPPORTS_WIDE_INT == 0
531 /* Constructs double_int from rtx CST. */
533 double_int
534 rtx_to_double_int (const_rtx cst)
536 double_int r;
538 if (CONST_INT_P (cst))
539 r = double_int::from_shwi (INTVAL (cst));
540 else if (CONST_DOUBLE_AS_INT_P (cst))
542 r.low = CONST_DOUBLE_LOW (cst);
543 r.high = CONST_DOUBLE_HIGH (cst);
545 else
546 gcc_unreachable ();
548 return r;
550 #endif
552 #if TARGET_SUPPORTS_WIDE_INT
553 /* Determine whether CONST_WIDE_INT WINT already exists in the hash table.
554 If so, return its counterpart; otherwise add it to the hash table and
555 return it. */
557 static rtx
558 lookup_const_wide_int (rtx wint)
560 void **slot = htab_find_slot (const_wide_int_htab, wint, INSERT);
561 if (*slot == 0)
562 *slot = wint;
564 return (rtx) *slot;
566 #endif
568 /* Return an rtx constant for V, given that the constant has mode MODE.
569 The returned rtx will be a CONST_INT if V fits, otherwise it will be
570 a CONST_DOUBLE (if !TARGET_SUPPORTS_WIDE_INT) or a CONST_WIDE_INT
571 (if TARGET_SUPPORTS_WIDE_INT). */
574 immed_wide_int_const (const wide_int_ref &v, machine_mode mode)
576 unsigned int len = v.get_len ();
577 unsigned int prec = GET_MODE_PRECISION (mode);
579 /* Allow truncation but not extension since we do not know if the
580 number is signed or unsigned. */
581 gcc_assert (prec <= v.get_precision ());
583 if (len < 2 || prec <= HOST_BITS_PER_WIDE_INT)
584 return gen_int_mode (v.elt (0), mode);
586 #if TARGET_SUPPORTS_WIDE_INT
588 unsigned int i;
589 rtx value;
590 unsigned int blocks_needed
591 = (prec + HOST_BITS_PER_WIDE_INT - 1) / HOST_BITS_PER_WIDE_INT;
593 if (len > blocks_needed)
594 len = blocks_needed;
596 value = const_wide_int_alloc (len);
598 /* It is so tempting to just put the mode in here. Must control
599 myself ... */
600 PUT_MODE (value, VOIDmode);
601 CWI_PUT_NUM_ELEM (value, len);
603 for (i = 0; i < len; i++)
604 CONST_WIDE_INT_ELT (value, i) = v.elt (i);
606 return lookup_const_wide_int (value);
608 #else
609 return immed_double_const (v.elt (0), v.elt (1), mode);
610 #endif
613 #if TARGET_SUPPORTS_WIDE_INT == 0
614 /* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
615 of ints: I0 is the low-order word and I1 is the high-order word.
616 For values that are larger than HOST_BITS_PER_DOUBLE_INT, the
617 implied upper bits are copies of the high bit of i1. The value
618 itself is neither signed nor unsigned. Do not use this routine for
619 non-integer modes; convert to REAL_VALUE_TYPE and use
620 CONST_DOUBLE_FROM_REAL_VALUE. */
623 immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, machine_mode mode)
625 rtx value;
626 unsigned int i;
628 /* There are the following cases (note that there are no modes with
629 HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < HOST_BITS_PER_DOUBLE_INT):
631 1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
632 gen_int_mode.
633 2) If the value of the integer fits into HOST_WIDE_INT anyway
 634 (i.e., i1 consists only of copies of the sign bit, and the signs
 635 of i0 and i1 are the same), then we return a CONST_INT for i0.
636 3) Otherwise, we create a CONST_DOUBLE for i0 and i1. */
637 if (mode != VOIDmode)
639 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
640 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
641 /* We can get a 0 for an error mark. */
642 || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
643 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT);
645 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
646 return gen_int_mode (i0, mode);
649 /* If this integer fits in one word, return a CONST_INT. */
650 if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
651 return GEN_INT (i0);
653 /* We use VOIDmode for integers. */
654 value = rtx_alloc (CONST_DOUBLE);
655 PUT_MODE (value, VOIDmode);
657 CONST_DOUBLE_LOW (value) = i0;
658 CONST_DOUBLE_HIGH (value) = i1;
660 for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
661 XWINT (value, i) = 0;
663 return lookup_const_double (value);
665 #endif
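/* Illustrative sketch, added for exposition and not part of the original
   file: the three cases handled by immed_double_const above, assuming
   !TARGET_SUPPORTS_WIDE_INT, a 64-bit HOST_WIDE_INT and the usual
   SImode/DImode/TImode sizes.  Hypothetical example, kept under #if 0.  */
#if 0
static void
example_immed_double_const (void)
{
  /* Case 1: the mode fits in a HOST_WIDE_INT, so gen_int_mode is used.  */
  gcc_assert (immed_double_const (-1, -1, SImode) == constm1_rtx);

  /* Case 2: I1 is merely the sign extension of I0, so a CONST_INT results
     even for the wide TImode.  */
  gcc_assert (immed_double_const (5, 0, TImode) == GEN_INT (5));

  /* Case 3: the value genuinely needs two words; a VOIDmode CONST_DOUBLE
     holding I0 (low) and I1 (high) is created and shared via the hash.  */
  rtx two_to_64 = immed_double_const (0, 1, TImode);
  gcc_assert (CONST_DOUBLE_AS_INT_P (two_to_64));
}
#endif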
668 gen_rtx_REG (machine_mode mode, unsigned int regno)
670 /* In case the MD file explicitly references the frame pointer, have
671 all such references point to the same frame pointer. This is
672 used during frame pointer elimination to distinguish the explicit
673 references to these registers from pseudos that happened to be
674 assigned to them.
676 If we have eliminated the frame pointer or arg pointer, we will
677 be using it as a normal register, for example as a spill
678 register. In such cases, we might be accessing it in a mode that
679 is not Pmode and therefore cannot use the pre-allocated rtx.
681 Also don't do this when we are making new REGs in reload, since
682 we don't want to get confused with the real pointers. */
684 if (mode == Pmode && !reload_in_progress && !lra_in_progress)
686 if (regno == FRAME_POINTER_REGNUM
687 && (!reload_completed || frame_pointer_needed))
688 return frame_pointer_rtx;
689 #if !HARD_FRAME_POINTER_IS_FRAME_POINTER
690 if (regno == HARD_FRAME_POINTER_REGNUM
691 && (!reload_completed || frame_pointer_needed))
692 return hard_frame_pointer_rtx;
693 #endif
694 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && !HARD_FRAME_POINTER_IS_ARG_POINTER
695 if (regno == ARG_POINTER_REGNUM)
696 return arg_pointer_rtx;
697 #endif
698 #ifdef RETURN_ADDRESS_POINTER_REGNUM
699 if (regno == RETURN_ADDRESS_POINTER_REGNUM)
700 return return_address_pointer_rtx;
701 #endif
702 if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
703 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
704 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
705 return pic_offset_table_rtx;
706 if (regno == STACK_POINTER_REGNUM)
707 return stack_pointer_rtx;
710 #if 0
711 /* If the per-function register table has been set up, try to re-use
712 an existing entry in that table to avoid useless generation of RTL.
714 This code is disabled for now until we can fix the various backends
715 which depend on having non-shared hard registers in some cases. Long
716 term we want to re-enable this code as it can significantly cut down
717 on the amount of useless RTL that gets generated.
719 We'll also need to fix some code that runs after reload that wants to
720 set ORIGINAL_REGNO. */
722 if (cfun
723 && cfun->emit
724 && regno_reg_rtx
725 && regno < FIRST_PSEUDO_REGISTER
726 && reg_raw_mode[regno] == mode)
727 return regno_reg_rtx[regno];
728 #endif
730 return gen_raw_REG (mode, regno);
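/* Illustrative sketch, added for exposition and not part of the original
   file: the sharing rules implemented above.  Hypothetical example, kept
   under #if 0.  */
#if 0
static void
example_gen_rtx_REG (void)
{
  /* Outside of reload/LRA, a Pmode reference to the stack pointer register
     is always the single shared stack_pointer_rtx; other (mode, regno)
     combinations fall through to a fresh gen_raw_REG.  */
  gcc_assert (gen_rtx_REG (Pmode, STACK_POINTER_REGNUM) == stack_pointer_rtx);
}
#endif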
734 gen_rtx_MEM (machine_mode mode, rtx addr)
736 rtx rt = gen_rtx_raw_MEM (mode, addr);
738 /* This field is not cleared by the mere allocation of the rtx, so
739 we clear it here. */
740 MEM_ATTRS (rt) = 0;
742 return rt;
745 /* Generate a memory referring to non-trapping constant memory. */
748 gen_const_mem (machine_mode mode, rtx addr)
750 rtx mem = gen_rtx_MEM (mode, addr);
751 MEM_READONLY_P (mem) = 1;
752 MEM_NOTRAP_P (mem) = 1;
753 return mem;
756 /* Generate a MEM referring to fixed portions of the frame, e.g., register
757 save areas. */
760 gen_frame_mem (machine_mode mode, rtx addr)
762 rtx mem = gen_rtx_MEM (mode, addr);
763 MEM_NOTRAP_P (mem) = 1;
764 set_mem_alias_set (mem, get_frame_alias_set ());
765 return mem;
768 /* Generate a MEM referring to a temporary use of the stack, not part
769 of the fixed stack frame. For example, something which is pushed
770 by a target splitter. */
772 gen_tmp_stack_mem (machine_mode mode, rtx addr)
774 rtx mem = gen_rtx_MEM (mode, addr);
775 MEM_NOTRAP_P (mem) = 1;
776 if (!cfun->calls_alloca)
777 set_mem_alias_set (mem, get_frame_alias_set ());
778 return mem;
781 /* We want to create (subreg:OMODE (obj:IMODE) OFFSET). Return true if
782 this construct would be valid, and false otherwise. */
784 bool
785 validate_subreg (machine_mode omode, machine_mode imode,
786 const_rtx reg, unsigned int offset)
788 unsigned int isize = GET_MODE_SIZE (imode);
789 unsigned int osize = GET_MODE_SIZE (omode);
791 /* All subregs must be aligned. */
792 if (offset % osize != 0)
793 return false;
795 /* The subreg offset cannot be outside the inner object. */
796 if (offset >= isize)
797 return false;
799 /* ??? This should not be here. Temporarily continue to allow word_mode
800 subregs of anything. The most common offender is (subreg:SI (reg:DF)).
801 Generally, backends are doing something sketchy but it'll take time to
802 fix them all. */
803 if (omode == word_mode)
805 /* ??? Similarly, e.g. with (subreg:DF (reg:TI)). Though store_bit_field
806 is the culprit here, and not the backends. */
807 else if (osize >= UNITS_PER_WORD && isize >= osize)
809 /* Allow component subregs of complex and vector. Though given the below
810 extraction rules, it's not always clear what that means. */
811 else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
812 && GET_MODE_INNER (imode) == omode)
814 /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
815 i.e. (subreg:V4SF (reg:SF) 0). This surely isn't the cleanest way to
816 represent this. It's questionable if this ought to be represented at
817 all -- why can't this all be hidden in post-reload splitters that make
 818 arbitrary mode changes to the registers themselves. */
819 else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
821 /* Subregs involving floating point modes are not allowed to
822 change size. Therefore (subreg:DI (reg:DF) 0) is fine, but
823 (subreg:SI (reg:DF) 0) isn't. */
824 else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
826 if (! (isize == osize
827 /* LRA can use subreg to store a floating point value in
828 an integer mode. Although the floating point and the
829 integer modes need the same number of hard registers,
 830 the size of the floating point mode can be less than that of the
 831 integer mode. LRA also uses subregs for a register that
 832 should be used in a different mode in an insn. */
833 || lra_in_progress))
834 return false;
837 /* Paradoxical subregs must have offset zero. */
838 if (osize > isize)
839 return offset == 0;
841 /* This is a normal subreg. Verify that the offset is representable. */
843 /* For hard registers, we already have most of these rules collected in
844 subreg_offset_representable_p. */
845 if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
847 unsigned int regno = REGNO (reg);
849 #ifdef CANNOT_CHANGE_MODE_CLASS
850 if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
851 && GET_MODE_INNER (imode) == omode)
853 else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
854 return false;
855 #endif
857 return subreg_offset_representable_p (regno, imode, offset, omode);
860 /* For pseudo registers, we want most of the same checks. Namely:
 861 If the register is no larger than a word, the subreg must be the lowpart.
862 If the register is larger than a word, the subreg must be the lowpart
863 of a subword. A subreg does *not* perform arbitrary bit extraction.
864 Given that we've already checked mode/offset alignment, we only have
865 to check subword subregs here. */
866 if (osize < UNITS_PER_WORD
867 && ! (lra_in_progress && (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))))
869 machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
870 unsigned int low_off = subreg_lowpart_offset (omode, wmode);
871 if (offset % UNITS_PER_WORD != low_off)
872 return false;
874 return true;
878 gen_rtx_SUBREG (machine_mode mode, rtx reg, int offset)
880 gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
881 return gen_rtx_raw_SUBREG (mode, reg, offset);
 884 /* Generate a SUBREG representing the least-significant part of REG if MODE
 885 is smaller than the mode of REG; otherwise generate a paradoxical SUBREG. */
888 gen_lowpart_SUBREG (machine_mode mode, rtx reg)
890 machine_mode inmode;
892 inmode = GET_MODE (reg);
893 if (inmode == VOIDmode)
894 inmode = mode;
895 return gen_rtx_SUBREG (mode, reg,
896 subreg_lowpart_offset (mode, inmode));
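/* Illustrative sketch, added for exposition and not part of the original
   file: building a lowpart SUBREG of a DImode pseudo during expansion,
   assuming 4-byte SImode and 8-byte DImode.  Hypothetical example, kept
   under #if 0.  */
#if 0
static void
example_lowpart_subreg (void)
{
  rtx di_reg = gen_reg_rtx (DImode);
  rtx lo = gen_lowpart_SUBREG (SImode, di_reg);

  /* subreg_lowpart_offset selects byte 0 on little-endian targets and
     byte 4 on big-endian ones; validate_subreg has vetted the result.  */
  gcc_assert (GET_CODE (lo) == SUBREG && subreg_lowpart_p (lo));
}
#endif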
900 gen_rtx_VAR_LOCATION (machine_mode mode, tree decl, rtx loc,
901 enum var_init_status status)
903 rtx x = gen_rtx_fmt_te (VAR_LOCATION, mode, decl, loc);
904 PAT_VAR_LOCATION_STATUS (x) = status;
905 return x;
 909 /* Create an rtvec and store within it the RTXen passed in the arguments. */
911 rtvec
912 gen_rtvec (int n, ...)
914 int i;
915 rtvec rt_val;
916 va_list p;
918 va_start (p, n);
920 /* Don't allocate an empty rtvec... */
921 if (n == 0)
923 va_end (p);
924 return NULL_RTVEC;
927 rt_val = rtvec_alloc (n);
929 for (i = 0; i < n; i++)
930 rt_val->elem[i] = va_arg (p, rtx);
932 va_end (p);
933 return rt_val;
936 rtvec
937 gen_rtvec_v (int n, rtx *argp)
939 int i;
940 rtvec rt_val;
942 /* Don't allocate an empty rtvec... */
943 if (n == 0)
944 return NULL_RTVEC;
946 rt_val = rtvec_alloc (n);
948 for (i = 0; i < n; i++)
949 rt_val->elem[i] = *argp++;
951 return rt_val;
954 rtvec
955 gen_rtvec_v (int n, rtx_insn **argp)
957 int i;
958 rtvec rt_val;
960 /* Don't allocate an empty rtvec... */
961 if (n == 0)
962 return NULL_RTVEC;
964 rt_val = rtvec_alloc (n);
966 for (i = 0; i < n; i++)
967 rt_val->elem[i] = *argp++;
969 return rt_val;
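/* Illustrative sketch, added for exposition and not part of the original
   file: gen_rtvec is the usual way to hand a fixed number of rtxes to a
   vector-operand code such as PARALLEL.  Hypothetical example, kept under
   #if 0.  */
#if 0
static rtx
example_parallel (rtx set1, rtx set2)
{
  /* Wrap two SETs into a single PARALLEL pattern.  */
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set1, set2));
}
#endif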
973 /* Return the number of bytes between the start of an OUTER_MODE
974 in-memory value and the start of an INNER_MODE in-memory value,
975 given that the former is a lowpart of the latter. It may be a
976 paradoxical lowpart, in which case the offset will be negative
977 on big-endian targets. */
980 byte_lowpart_offset (machine_mode outer_mode,
981 machine_mode inner_mode)
983 if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode))
984 return subreg_lowpart_offset (outer_mode, inner_mode);
985 else
986 return -subreg_lowpart_offset (inner_mode, outer_mode);
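/* Illustrative sketch, added for exposition and not part of the original
   file: concrete values of byte_lowpart_offset for 4-byte SImode within
   8-byte DImode.  Hypothetical example, kept under #if 0.  */
#if 0
static void
example_byte_lowpart_offset (void)
{
  /* Narrowing lowpart: byte 0 on little-endian, byte 4 on big-endian.  */
  int narrow = byte_lowpart_offset (SImode, DImode);

  /* Paradoxical (widening) lowpart: 0 on little-endian, -4 on big-endian,
     matching the comment above.  */
  int wide = byte_lowpart_offset (DImode, SImode);
  gcc_assert (narrow == -wide);
}
#endif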
989 /* Generate a REG rtx for a new pseudo register of mode MODE.
990 This pseudo is assigned the next sequential register number. */
993 gen_reg_rtx (machine_mode mode)
995 rtx val;
996 unsigned int align = GET_MODE_ALIGNMENT (mode);
998 gcc_assert (can_create_pseudo_p ());
1000 /* If a virtual register with bigger mode alignment is generated,
1001 increase stack alignment estimation because it might be spilled
1002 to stack later. */
1003 if (SUPPORTS_STACK_ALIGNMENT
1004 && crtl->stack_alignment_estimated < align
1005 && !crtl->stack_realign_processed)
1007 unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
1008 if (crtl->stack_alignment_estimated < min_align)
1009 crtl->stack_alignment_estimated = min_align;
1012 if (generating_concat_p
1013 && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
1014 || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
1016 /* For complex modes, don't make a single pseudo.
1017 Instead, make a CONCAT of two pseudos.
1018 This allows noncontiguous allocation of the real and imaginary parts,
1019 which makes much better code. Besides, allocating DCmode
1020 pseudos overstrains reload on some machines like the 386. */
1021 rtx realpart, imagpart;
1022 machine_mode partmode = GET_MODE_INNER (mode);
1024 realpart = gen_reg_rtx (partmode);
1025 imagpart = gen_reg_rtx (partmode);
1026 return gen_rtx_CONCAT (mode, realpart, imagpart);
1029 /* Do not call gen_reg_rtx with uninitialized crtl. */
1030 gcc_assert (crtl->emit.regno_pointer_align_length);
1032 /* Make sure regno_pointer_align, and regno_reg_rtx are large
1033 enough to have an element for this pseudo reg number. */
1035 if (reg_rtx_no == crtl->emit.regno_pointer_align_length)
1037 int old_size = crtl->emit.regno_pointer_align_length;
1038 char *tmp;
1039 rtx *new1;
1041 tmp = XRESIZEVEC (char, crtl->emit.regno_pointer_align, old_size * 2);
1042 memset (tmp + old_size, 0, old_size);
1043 crtl->emit.regno_pointer_align = (unsigned char *) tmp;
1045 new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, old_size * 2);
1046 memset (new1 + old_size, 0, old_size * sizeof (rtx));
1047 regno_reg_rtx = new1;
1049 crtl->emit.regno_pointer_align_length = old_size * 2;
1052 val = gen_raw_REG (mode, reg_rtx_no);
1053 regno_reg_rtx[reg_rtx_no++] = val;
1054 return val;
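/* Illustrative sketch, added for exposition and not part of the original
   file: the complex-mode special case above, assuming generating_concat_p
   (i.e. normal expansion).  Hypothetical example, kept under #if 0.  */
#if 0
static void
example_gen_reg_rtx_complex (void)
{
  /* A DCmode (complex double) pseudo is really a CONCAT of two DFmode
     pseudos, so the real and imaginary parts can be allocated separately.  */
  rtx c = gen_reg_rtx (DCmode);
  gcc_assert (GET_CODE (c) == CONCAT
              && GET_MODE (XEXP (c, 0)) == DFmode
              && GET_MODE (XEXP (c, 1)) == DFmode);
}
#endif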
 1057 /* Return TRUE if REG's REG_EXPR is a PARM_DECL, FALSE otherwise. */
1059 bool
1060 reg_is_parm_p (rtx reg)
1062 tree decl;
1064 gcc_assert (REG_P (reg));
1065 decl = REG_EXPR (reg);
1066 return (decl && TREE_CODE (decl) == PARM_DECL);
 1069 /* Update NEW_RTX with the same attributes as REG, but with OFFSET added
 1070 to the REG_OFFSET. */
1072 static void
1073 update_reg_offset (rtx new_rtx, rtx reg, int offset)
1075 REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
1076 REG_OFFSET (reg) + offset);
1079 /* Generate a register with same attributes as REG, but with OFFSET
1080 added to the REG_OFFSET. */
1083 gen_rtx_REG_offset (rtx reg, machine_mode mode, unsigned int regno,
1084 int offset)
1086 rtx new_rtx = gen_rtx_REG (mode, regno);
1088 update_reg_offset (new_rtx, reg, offset);
1089 return new_rtx;
1092 /* Generate a new pseudo-register with the same attributes as REG, but
1093 with OFFSET added to the REG_OFFSET. */
1096 gen_reg_rtx_offset (rtx reg, machine_mode mode, int offset)
1098 rtx new_rtx = gen_reg_rtx (mode);
1100 update_reg_offset (new_rtx, reg, offset);
1101 return new_rtx;
1104 /* Adjust REG in-place so that it has mode MODE. It is assumed that the
1105 new register is a (possibly paradoxical) lowpart of the old one. */
1107 void
1108 adjust_reg_mode (rtx reg, machine_mode mode)
1110 update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
1111 PUT_MODE (reg, mode);
1114 /* Copy REG's attributes from X, if X has any attributes. If REG and X
1115 have different modes, REG is a (possibly paradoxical) lowpart of X. */
1117 void
1118 set_reg_attrs_from_value (rtx reg, rtx x)
1120 int offset;
1121 bool can_be_reg_pointer = true;
1123 /* Don't call mark_reg_pointer for incompatible pointer sign
1124 extension. */
1125 while (GET_CODE (x) == SIGN_EXTEND
1126 || GET_CODE (x) == ZERO_EXTEND
1127 || GET_CODE (x) == TRUNCATE
1128 || (GET_CODE (x) == SUBREG && subreg_lowpart_p (x)))
1130 #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
1131 if ((GET_CODE (x) == SIGN_EXTEND && POINTERS_EXTEND_UNSIGNED)
1132 || (GET_CODE (x) != SIGN_EXTEND && ! POINTERS_EXTEND_UNSIGNED))
1133 can_be_reg_pointer = false;
1134 #endif
1135 x = XEXP (x, 0);
1138 /* Hard registers can be reused for multiple purposes within the same
1139 function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
1140 on them is wrong. */
1141 if (HARD_REGISTER_P (reg))
1142 return;
1144 offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
1145 if (MEM_P (x))
1147 if (MEM_OFFSET_KNOWN_P (x))
1148 REG_ATTRS (reg) = get_reg_attrs (MEM_EXPR (x),
1149 MEM_OFFSET (x) + offset);
1150 if (can_be_reg_pointer && MEM_POINTER (x))
1151 mark_reg_pointer (reg, 0);
1153 else if (REG_P (x))
1155 if (REG_ATTRS (x))
1156 update_reg_offset (reg, x, offset);
1157 if (can_be_reg_pointer && REG_POINTER (x))
1158 mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
1162 /* Generate a REG rtx for a new pseudo register, copying the mode
1163 and attributes from X. */
1166 gen_reg_rtx_and_attrs (rtx x)
1168 rtx reg = gen_reg_rtx (GET_MODE (x));
1169 set_reg_attrs_from_value (reg, x);
1170 return reg;
1173 /* Set the register attributes for registers contained in PARM_RTX.
1174 Use needed values from memory attributes of MEM. */
1176 void
1177 set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
1179 if (REG_P (parm_rtx))
1180 set_reg_attrs_from_value (parm_rtx, mem);
1181 else if (GET_CODE (parm_rtx) == PARALLEL)
1183 /* Check for a NULL entry in the first slot, used to indicate that the
1184 parameter goes both on the stack and in registers. */
1185 int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
1186 for (; i < XVECLEN (parm_rtx, 0); i++)
1188 rtx x = XVECEXP (parm_rtx, 0, i);
1189 if (REG_P (XEXP (x, 0)))
1190 REG_ATTRS (XEXP (x, 0))
1191 = get_reg_attrs (MEM_EXPR (mem),
1192 INTVAL (XEXP (x, 1)));
1197 /* Set the REG_ATTRS for registers in value X, given that X represents
1198 decl T. */
1200 void
1201 set_reg_attrs_for_decl_rtl (tree t, rtx x)
1203 if (GET_CODE (x) == SUBREG)
1205 gcc_assert (subreg_lowpart_p (x));
1206 x = SUBREG_REG (x);
1208 if (REG_P (x))
1209 REG_ATTRS (x)
1210 = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
1211 DECL_MODE (t)));
1212 if (GET_CODE (x) == CONCAT)
1214 if (REG_P (XEXP (x, 0)))
1215 REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
1216 if (REG_P (XEXP (x, 1)))
1217 REG_ATTRS (XEXP (x, 1))
1218 = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
1220 if (GET_CODE (x) == PARALLEL)
1222 int i, start;
1224 /* Check for a NULL entry, used to indicate that the parameter goes
1225 both on the stack and in registers. */
1226 if (XEXP (XVECEXP (x, 0, 0), 0))
1227 start = 0;
1228 else
1229 start = 1;
1231 for (i = start; i < XVECLEN (x, 0); i++)
1233 rtx y = XVECEXP (x, 0, i);
1234 if (REG_P (XEXP (y, 0)))
1235 REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
1240 /* Assign the RTX X to declaration T. */
1242 void
1243 set_decl_rtl (tree t, rtx x)
1245 DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
1246 if (x)
1247 set_reg_attrs_for_decl_rtl (t, x);
1250 /* Assign the RTX X to parameter declaration T. BY_REFERENCE_P is true
1251 if the ABI requires the parameter to be passed by reference. */
1253 void
1254 set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
1256 DECL_INCOMING_RTL (t) = x;
1257 if (x && !by_reference_p)
1258 set_reg_attrs_for_decl_rtl (t, x);
1261 /* Identify REG (which may be a CONCAT) as a user register. */
1263 void
1264 mark_user_reg (rtx reg)
1266 if (GET_CODE (reg) == CONCAT)
1268 REG_USERVAR_P (XEXP (reg, 0)) = 1;
1269 REG_USERVAR_P (XEXP (reg, 1)) = 1;
1271 else
1273 gcc_assert (REG_P (reg));
1274 REG_USERVAR_P (reg) = 1;
1278 /* Identify REG as a probable pointer register and show its alignment
1279 as ALIGN, if nonzero. */
1281 void
1282 mark_reg_pointer (rtx reg, int align)
1284 if (! REG_POINTER (reg))
1286 REG_POINTER (reg) = 1;
1288 if (align)
1289 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1291 else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
 1292 /* We can no longer be sure just how aligned this pointer is. */
1293 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1296 /* Return 1 plus largest pseudo reg number used in the current function. */
1299 max_reg_num (void)
1301 return reg_rtx_no;
1304 /* Return 1 + the largest label number used so far in the current function. */
1307 max_label_num (void)
1309 return label_num;
1312 /* Return first label number used in this function (if any were used). */
1315 get_first_label_num (void)
1317 return first_label_num;
1320 /* If the rtx for label was created during the expansion of a nested
1321 function, then first_label_num won't include this label number.
1322 Fix this now so that array indices work later. */
1324 void
1325 maybe_set_first_label_num (rtx x)
1327 if (CODE_LABEL_NUMBER (x) < first_label_num)
1328 first_label_num = CODE_LABEL_NUMBER (x);
1331 /* Return a value representing some low-order bits of X, where the number
1332 of low-order bits is given by MODE. Note that no conversion is done
 1333 between floating-point and fixed-point values; rather, the bit
1334 representation is returned.
1336 This function handles the cases in common between gen_lowpart, below,
1337 and two variants in cse.c and combine.c. These are the cases that can
1338 be safely handled at all points in the compilation.
1340 If this is not a case we can handle, return 0. */
1343 gen_lowpart_common (machine_mode mode, rtx x)
1345 int msize = GET_MODE_SIZE (mode);
1346 int xsize;
1347 int offset = 0;
1348 machine_mode innermode;
1350 /* Unfortunately, this routine doesn't take a parameter for the mode of X,
1351 so we have to make one up. Yuk. */
1352 innermode = GET_MODE (x);
1353 if (CONST_INT_P (x)
1354 && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
1355 innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
1356 else if (innermode == VOIDmode)
1357 innermode = mode_for_size (HOST_BITS_PER_DOUBLE_INT, MODE_INT, 0);
1359 xsize = GET_MODE_SIZE (innermode);
1361 gcc_assert (innermode != VOIDmode && innermode != BLKmode);
1363 if (innermode == mode)
1364 return x;
1366 /* MODE must occupy no more words than the mode of X. */
1367 if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
1368 > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
1369 return 0;
1371 /* Don't allow generating paradoxical FLOAT_MODE subregs. */
1372 if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
1373 return 0;
1375 offset = subreg_lowpart_offset (mode, innermode);
1377 if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
1378 && (GET_MODE_CLASS (mode) == MODE_INT
1379 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
1381 /* If we are getting the low-order part of something that has been
1382 sign- or zero-extended, we can either just use the object being
1383 extended or make a narrower extension. If we want an even smaller
1384 piece than the size of the object being extended, call ourselves
1385 recursively.
1387 This case is used mostly by combine and cse. */
1389 if (GET_MODE (XEXP (x, 0)) == mode)
1390 return XEXP (x, 0);
1391 else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
1392 return gen_lowpart_common (mode, XEXP (x, 0));
1393 else if (msize < xsize)
1394 return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
1396 else if (GET_CODE (x) == SUBREG || REG_P (x)
1397 || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
1398 || CONST_DOUBLE_AS_FLOAT_P (x) || CONST_SCALAR_INT_P (x))
1399 return simplify_gen_subreg (mode, x, innermode, offset);
1401 /* Otherwise, we can't do this. */
1402 return 0;
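/* Illustrative sketch, added for exposition and not part of the original
   file: the extension shortcut in gen_lowpart_common, assuming SI_REG is
   an SImode register.  Hypothetical example, kept under #if 0.  */
#if 0
static void
example_gen_lowpart_common (rtx si_reg)
{
  /* The SImode lowpart of (zero_extend:DI (reg:SI)) is simply the original
     SImode register; no SUBREG is created.  */
  rtx ext = gen_rtx_ZERO_EXTEND (DImode, si_reg);
  gcc_assert (gen_lowpart_common (SImode, ext) == si_reg);
}
#endif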
1406 gen_highpart (machine_mode mode, rtx x)
1408 unsigned int msize = GET_MODE_SIZE (mode);
1409 rtx result;
1411 /* This case loses if X is a subreg. To catch bugs early,
1412 complain if an invalid MODE is used even in other cases. */
1413 gcc_assert (msize <= UNITS_PER_WORD
1414 || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));
1416 result = simplify_gen_subreg (mode, x, GET_MODE (x),
1417 subreg_highpart_offset (mode, GET_MODE (x)));
1418 gcc_assert (result);
1420 /* simplify_gen_subreg is not guaranteed to return a valid operand for
1421 the target if we have a MEM. gen_highpart must return a valid operand,
1422 emitting code if necessary to do so. */
1423 if (MEM_P (result))
1425 result = validize_mem (result);
1426 gcc_assert (result);
1429 return result;
 1432 /* Like gen_highpart, but accept the mode of the EXP operand in case EXP
 1433 can be a VOIDmode constant. */
1435 gen_highpart_mode (machine_mode outermode, machine_mode innermode, rtx exp)
1437 if (GET_MODE (exp) != VOIDmode)
1439 gcc_assert (GET_MODE (exp) == innermode);
1440 return gen_highpart (outermode, exp);
1442 return simplify_gen_subreg (outermode, exp, innermode,
1443 subreg_highpart_offset (outermode, innermode));
1446 /* Return the SUBREG_BYTE for an OUTERMODE lowpart of an INNERMODE value. */
1448 unsigned int
1449 subreg_lowpart_offset (machine_mode outermode, machine_mode innermode)
1451 unsigned int offset = 0;
1452 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1454 if (difference > 0)
1456 if (WORDS_BIG_ENDIAN)
1457 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1458 if (BYTES_BIG_ENDIAN)
1459 offset += difference % UNITS_PER_WORD;
1462 return offset;
1465 /* Return offset in bytes to get OUTERMODE high part
1466 of the value in mode INNERMODE stored in memory in target format. */
1467 unsigned int
1468 subreg_highpart_offset (machine_mode outermode, machine_mode innermode)
1470 unsigned int offset = 0;
1471 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1473 gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));
1475 if (difference > 0)
1477 if (! WORDS_BIG_ENDIAN)
1478 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1479 if (! BYTES_BIG_ENDIAN)
1480 offset += difference % UNITS_PER_WORD;
1483 return offset;
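/* Illustrative sketch, added for exposition and not part of the original
   file: the two offset helpers above for SImode parts of a DImode value
   (4-byte SImode, 8-byte DImode).  Hypothetical example, kept under #if 0.  */
#if 0
static void
example_subreg_offsets (void)
{
  /* Little-endian: low part at byte 0, high part at byte 4.
     Big-endian:    low part at byte 4, high part at byte 0.  */
  unsigned int lo = subreg_lowpart_offset (SImode, DImode);
  unsigned int hi = subreg_highpart_offset (SImode, DImode);
  gcc_assert (lo + hi == GET_MODE_SIZE (DImode) - GET_MODE_SIZE (SImode));
}
#endif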
1486 /* Return 1 iff X, assumed to be a SUBREG,
1487 refers to the least significant part of its containing reg.
1488 If X is not a SUBREG, always return 1 (it is its own low part!). */
1491 subreg_lowpart_p (const_rtx x)
1493 if (GET_CODE (x) != SUBREG)
1494 return 1;
1495 else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
1496 return 0;
1498 return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
1499 == SUBREG_BYTE (x));
1502 /* Return true if X is a paradoxical subreg, false otherwise. */
1503 bool
1504 paradoxical_subreg_p (const_rtx x)
1506 if (GET_CODE (x) != SUBREG)
1507 return false;
1508 return (GET_MODE_PRECISION (GET_MODE (x))
1509 > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (x))));
1512 /* Return subword OFFSET of operand OP.
1513 The word number, OFFSET, is interpreted as the word number starting
1514 at the low-order address. OFFSET 0 is the low-order word if not
1515 WORDS_BIG_ENDIAN, otherwise it is the high-order word.
1517 If we cannot extract the required word, we return zero. Otherwise,
1518 an rtx corresponding to the requested word will be returned.
1520 VALIDATE_ADDRESS is nonzero if the address should be validated. Before
1521 reload has completed, a valid address will always be returned. After
1522 reload, if a valid address cannot be returned, we return zero.
1524 If VALIDATE_ADDRESS is zero, we simply form the required address; validating
1525 it is the responsibility of the caller.
1527 MODE is the mode of OP in case it is a CONST_INT.
1529 ??? This is still rather broken for some cases. The problem for the
1530 moment is that all callers of this thing provide no 'goal mode' to
1531 tell us to work with. This exists because all callers were written
 1532 in a word-based SUBREG world.
1533 Now use of this function can be deprecated by simplify_subreg in most
1534 cases.
1538 operand_subword (rtx op, unsigned int offset, int validate_address, machine_mode mode)
1540 if (mode == VOIDmode)
1541 mode = GET_MODE (op);
1543 gcc_assert (mode != VOIDmode);
1545 /* If OP is narrower than a word, fail. */
1546 if (mode != BLKmode
1547 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
1548 return 0;
1550 /* If we want a word outside OP, return zero. */
1551 if (mode != BLKmode
1552 && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
1553 return const0_rtx;
1555 /* Form a new MEM at the requested address. */
1556 if (MEM_P (op))
1558 rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);
1560 if (! validate_address)
1561 return new_rtx;
1563 else if (reload_completed)
1565 if (! strict_memory_address_addr_space_p (word_mode,
1566 XEXP (new_rtx, 0),
1567 MEM_ADDR_SPACE (op)))
1568 return 0;
1570 else
1571 return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
1574 /* Rest can be handled by simplify_subreg. */
1575 return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
1578 /* Similar to `operand_subword', but never return 0. If we can't
1579 extract the required subword, put OP into a register and try again.
1580 The second attempt must succeed. We always validate the address in
1581 this case.
1583 MODE is the mode of OP, in case it is CONST_INT. */
1586 operand_subword_force (rtx op, unsigned int offset, machine_mode mode)
1588 rtx result = operand_subword (op, offset, 1, mode);
1590 if (result)
1591 return result;
1593 if (mode != BLKmode && mode != VOIDmode)
 1595 /* If this is a register which cannot be accessed by words, copy it
1596 to a pseudo register. */
1597 if (REG_P (op))
1598 op = copy_to_reg (op);
1599 else
1600 op = force_reg (mode, op);
1603 result = operand_subword (op, offset, 1, mode);
1604 gcc_assert (result);
1606 return result;
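/* Illustrative sketch, added for exposition and not part of the original
   file: splitting a DImode pseudo into its two word_mode halves, assuming
   word_mode is 4-byte SImode.  Hypothetical example, kept under #if 0.  */
#if 0
static void
example_operand_subword (rtx di_reg)
{
  /* Word 0 is the low-order word on !WORDS_BIG_ENDIAN targets, word 1 the
     high-order word; for a pseudo both come back as (subreg:SI (reg:DI)).  */
  rtx w0 = operand_subword_force (di_reg, 0, DImode);
  rtx w1 = operand_subword_force (di_reg, 1, DImode);
  gcc_assert (GET_MODE (w0) == word_mode && GET_MODE (w1) == word_mode);
}
#endif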
 1609 /* Returns 1 if the two MEM_EXPRs, EXPR1 and EXPR2, can be considered
 1610 equal, and 0 otherwise. */
1613 mem_expr_equal_p (const_tree expr1, const_tree expr2)
1615 if (expr1 == expr2)
1616 return 1;
1618 if (! expr1 || ! expr2)
1619 return 0;
1621 if (TREE_CODE (expr1) != TREE_CODE (expr2))
1622 return 0;
1624 return operand_equal_p (expr1, expr2, 0);
1627 /* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
1628 bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
1629 -1 if not known. */
1632 get_mem_align_offset (rtx mem, unsigned int align)
1634 tree expr;
1635 unsigned HOST_WIDE_INT offset;
1637 /* This function can't use
1638 if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem)
1639 || (MAX (MEM_ALIGN (mem),
1640 MAX (align, get_object_alignment (MEM_EXPR (mem))))
1641 < align))
1642 return -1;
1643 else
1644 return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1);
1645 for two reasons:
1646 - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
1647 for <variable>. get_inner_reference doesn't handle it and
1648 even if it did, the alignment in that case needs to be determined
1649 from DECL_FIELD_CONTEXT's TYPE_ALIGN.
1650 - it would do suboptimal job for COMPONENT_REFs, even if MEM_EXPR
1651 isn't sufficiently aligned, the object it is in might be. */
1652 gcc_assert (MEM_P (mem));
1653 expr = MEM_EXPR (mem);
1654 if (expr == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
1655 return -1;
1657 offset = MEM_OFFSET (mem);
1658 if (DECL_P (expr))
1660 if (DECL_ALIGN (expr) < align)
1661 return -1;
1663 else if (INDIRECT_REF_P (expr))
1665 if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
1666 return -1;
1668 else if (TREE_CODE (expr) == COMPONENT_REF)
1670 while (1)
1672 tree inner = TREE_OPERAND (expr, 0);
1673 tree field = TREE_OPERAND (expr, 1);
1674 tree byte_offset = component_ref_field_offset (expr);
1675 tree bit_offset = DECL_FIELD_BIT_OFFSET (field);
1677 if (!byte_offset
1678 || !tree_fits_uhwi_p (byte_offset)
1679 || !tree_fits_uhwi_p (bit_offset))
1680 return -1;
1682 offset += tree_to_uhwi (byte_offset);
1683 offset += tree_to_uhwi (bit_offset) / BITS_PER_UNIT;
1685 if (inner == NULL_TREE)
1687 if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
1688 < (unsigned int) align)
1689 return -1;
1690 break;
1692 else if (DECL_P (inner))
1694 if (DECL_ALIGN (inner) < align)
1695 return -1;
1696 break;
1698 else if (TREE_CODE (inner) != COMPONENT_REF)
1699 return -1;
1700 expr = inner;
1703 else
1704 return -1;
1706 return offset & ((align / BITS_PER_UNIT) - 1);
 1709 /* Given REF (a MEM) and T, either the type of REF or the expression
 1710 corresponding to REF, set the memory attributes. OBJECTP is nonzero
1711 if we are making a new object of this type. BITPOS is nonzero if
1712 there is an offset outstanding on T that will be applied later. */
1714 void
1715 set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
1716 HOST_WIDE_INT bitpos)
1718 HOST_WIDE_INT apply_bitpos = 0;
1719 tree type;
1720 struct mem_attrs attrs, *defattrs, *refattrs;
1721 addr_space_t as;
1723 /* It can happen that type_for_mode was given a mode for which there
1724 is no language-level type. In which case it returns NULL, which
1725 we can see here. */
1726 if (t == NULL_TREE)
1727 return;
1729 type = TYPE_P (t) ? t : TREE_TYPE (t);
1730 if (type == error_mark_node)
1731 return;
1733 /* If we have already set DECL_RTL = ref, get_alias_set will get the
1734 wrong answer, as it assumes that DECL_RTL already has the right alias
1735 info. Callers should not set DECL_RTL until after the call to
1736 set_mem_attributes. */
1737 gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));
1739 memset (&attrs, 0, sizeof (attrs));
1741 /* Get the alias set from the expression or type (perhaps using a
1742 front-end routine) and use it. */
1743 attrs.alias = get_alias_set (t);
1745 MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
1746 MEM_POINTER (ref) = POINTER_TYPE_P (type);
1748 /* Default values from pre-existing memory attributes if present. */
1749 refattrs = MEM_ATTRS (ref);
1750 if (refattrs)
1752 /* ??? Can this ever happen? Calling this routine on a MEM that
1753 already carries memory attributes should probably be invalid. */
1754 attrs.expr = refattrs->expr;
1755 attrs.offset_known_p = refattrs->offset_known_p;
1756 attrs.offset = refattrs->offset;
1757 attrs.size_known_p = refattrs->size_known_p;
1758 attrs.size = refattrs->size;
1759 attrs.align = refattrs->align;
1762 /* Otherwise, default values from the mode of the MEM reference. */
1763 else
1765 defattrs = mode_mem_attrs[(int) GET_MODE (ref)];
1766 gcc_assert (!defattrs->expr);
1767 gcc_assert (!defattrs->offset_known_p);
1769 /* Respect mode size. */
1770 attrs.size_known_p = defattrs->size_known_p;
1771 attrs.size = defattrs->size;
1772 /* ??? Is this really necessary? We probably should always get
1773 the size from the type below. */
1775 /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
1776 if T is an object, always compute the object alignment below. */
1777 if (TYPE_P (t))
1778 attrs.align = defattrs->align;
1779 else
1780 attrs.align = BITS_PER_UNIT;
1781 /* ??? If T is a type, respecting mode alignment may *also* be wrong
1782 e.g. if the type carries an alignment attribute. Should we be
1783 able to simply always use TYPE_ALIGN? */
1786 /* We can set the alignment from the type if we are making an object,
1787 this is an INDIRECT_REF, or if TYPE_ALIGN_OK. */
1788 if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
1789 attrs.align = MAX (attrs.align, TYPE_ALIGN (type));
1791 /* If the size is known, we can set that. */
1792 tree new_size = TYPE_SIZE_UNIT (type);
1794 /* The address-space is that of the type. */
1795 as = TYPE_ADDR_SPACE (type);
1797 /* If T is not a type, we may be able to deduce some more information about
1798 the expression. */
1799 if (! TYPE_P (t))
1801 tree base;
1803 if (TREE_THIS_VOLATILE (t))
1804 MEM_VOLATILE_P (ref) = 1;
1806 /* Now remove any conversions: they don't change what the underlying
1807 object is. Likewise for SAVE_EXPR. */
1808 while (CONVERT_EXPR_P (t)
1809 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1810 || TREE_CODE (t) == SAVE_EXPR)
1811 t = TREE_OPERAND (t, 0);
1813 /* Note whether this expression can trap. */
1814 MEM_NOTRAP_P (ref) = !tree_could_trap_p (t);
1816 base = get_base_address (t);
1817 if (base)
1819 if (DECL_P (base)
1820 && TREE_READONLY (base)
1821 && (TREE_STATIC (base) || DECL_EXTERNAL (base))
1822 && !TREE_THIS_VOLATILE (base))
1823 MEM_READONLY_P (ref) = 1;
1825 /* Mark static const strings readonly as well. */
1826 if (TREE_CODE (base) == STRING_CST
1827 && TREE_READONLY (base)
1828 && TREE_STATIC (base))
1829 MEM_READONLY_P (ref) = 1;
1831 /* Address-space information is on the base object. */
1832 if (TREE_CODE (base) == MEM_REF
1833 || TREE_CODE (base) == TARGET_MEM_REF)
1834 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (base,
1835 0))));
1836 else
1837 as = TYPE_ADDR_SPACE (TREE_TYPE (base));
 1840 /* If this expression uses its parent's alias set, mark it such
 1841 that we won't change it. */
1842 if (component_uses_parent_alias_set_from (t) != NULL_TREE)
1843 MEM_KEEP_ALIAS_SET_P (ref) = 1;
1845 /* If this is a decl, set the attributes of the MEM from it. */
1846 if (DECL_P (t))
1848 attrs.expr = t;
1849 attrs.offset_known_p = true;
1850 attrs.offset = 0;
1851 apply_bitpos = bitpos;
1852 new_size = DECL_SIZE_UNIT (t);
1855 /* ??? If we end up with a constant here do record a MEM_EXPR. */
1856 else if (CONSTANT_CLASS_P (t))
1859 /* If this is a field reference, record it. */
1860 else if (TREE_CODE (t) == COMPONENT_REF)
1862 attrs.expr = t;
1863 attrs.offset_known_p = true;
1864 attrs.offset = 0;
1865 apply_bitpos = bitpos;
1866 if (DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
1867 new_size = DECL_SIZE_UNIT (TREE_OPERAND (t, 1));
1870 /* If this is an array reference, look for an outer field reference. */
1871 else if (TREE_CODE (t) == ARRAY_REF)
1873 tree off_tree = size_zero_node;
1874 /* We can't modify t, because we use it at the end of the
1875 function. */
1876 tree t2 = t;
1880 tree index = TREE_OPERAND (t2, 1);
1881 tree low_bound = array_ref_low_bound (t2);
1882 tree unit_size = array_ref_element_size (t2);
1884 /* We assume all arrays have sizes that are a multiple of a byte.
1885 First subtract the lower bound, if any, in the type of the
1886 index, then convert to sizetype and multiply by the size of
1887 the array element. */
1888 if (! integer_zerop (low_bound))
1889 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
1890 index, low_bound);
1892 off_tree = size_binop (PLUS_EXPR,
1893 size_binop (MULT_EXPR,
1894 fold_convert (sizetype,
1895 index),
1896 unit_size),
1897 off_tree);
1898 t2 = TREE_OPERAND (t2, 0);
1900 while (TREE_CODE (t2) == ARRAY_REF);
1902 if (DECL_P (t2)
1903 || TREE_CODE (t2) == COMPONENT_REF)
1905 attrs.expr = t2;
1906 attrs.offset_known_p = false;
1907 if (tree_fits_uhwi_p (off_tree))
1909 attrs.offset_known_p = true;
1910 attrs.offset = tree_to_uhwi (off_tree);
1911 apply_bitpos = bitpos;
1914 /* Else do not record a MEM_EXPR. */
1917 /* If this is an indirect reference, record it. */
1918 else if (TREE_CODE (t) == MEM_REF
1919 || TREE_CODE (t) == TARGET_MEM_REF)
1921 attrs.expr = t;
1922 attrs.offset_known_p = true;
1923 attrs.offset = 0;
1924 apply_bitpos = bitpos;
1927 /* Compute the alignment. */
1928 unsigned int obj_align;
1929 unsigned HOST_WIDE_INT obj_bitpos;
1930 get_object_alignment_1 (t, &obj_align, &obj_bitpos);
1931 obj_bitpos = (obj_bitpos - bitpos) & (obj_align - 1);
1932 if (obj_bitpos != 0)
1933 obj_align = (obj_bitpos & -obj_bitpos);
1934 attrs.align = MAX (attrs.align, obj_align);
1937 if (tree_fits_uhwi_p (new_size))
1939 attrs.size_known_p = true;
1940 attrs.size = tree_to_uhwi (new_size);
1943 /* If we modified OFFSET based on T, then subtract the outstanding
1944 bit position offset. Similarly, increase the size of the accessed
1945 object to contain the negative offset. */
1946 if (apply_bitpos)
1948 gcc_assert (attrs.offset_known_p);
1949 attrs.offset -= apply_bitpos / BITS_PER_UNIT;
1950 if (attrs.size_known_p)
1951 attrs.size += apply_bitpos / BITS_PER_UNIT;
1954 /* Now set the attributes we computed above. */
1955 attrs.addrspace = as;
1956 set_mem_attrs (ref, &attrs);
1959 void
1960 set_mem_attributes (rtx ref, tree t, int objectp)
1962 set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
1965 /* Set the alias set of MEM to SET. */
1967 void
1968 set_mem_alias_set (rtx mem, alias_set_type set)
1970 struct mem_attrs attrs;
1972 /* If the new and old alias sets don't conflict, something is wrong. */
1973 gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
1974 attrs = *get_mem_attrs (mem);
1975 attrs.alias = set;
1976 set_mem_attrs (mem, &attrs);
1979 /* Set the address space of MEM to ADDRSPACE (target-defined). */
1981 void
1982 set_mem_addr_space (rtx mem, addr_space_t addrspace)
1984 struct mem_attrs attrs;
1986 attrs = *get_mem_attrs (mem);
1987 attrs.addrspace = addrspace;
1988 set_mem_attrs (mem, &attrs);
1991 /* Set the alignment of MEM to ALIGN bits. */
1993 void
1994 set_mem_align (rtx mem, unsigned int align)
1996 struct mem_attrs attrs;
1998 attrs = *get_mem_attrs (mem);
1999 attrs.align = align;
2000 set_mem_attrs (mem, &attrs);
2003 /* Set the expr for MEM to EXPR. */
2005 void
2006 set_mem_expr (rtx mem, tree expr)
2008 struct mem_attrs attrs;
2010 attrs = *get_mem_attrs (mem);
2011 attrs.expr = expr;
2012 set_mem_attrs (mem, &attrs);
2015 /* Set the offset of MEM to OFFSET. */
2017 void
2018 set_mem_offset (rtx mem, HOST_WIDE_INT offset)
2020 struct mem_attrs attrs;
2022 attrs = *get_mem_attrs (mem);
2023 attrs.offset_known_p = true;
2024 attrs.offset = offset;
2025 set_mem_attrs (mem, &attrs);
2028 /* Clear the offset of MEM. */
2030 void
2031 clear_mem_offset (rtx mem)
2033 struct mem_attrs attrs;
2035 attrs = *get_mem_attrs (mem);
2036 attrs.offset_known_p = false;
2037 set_mem_attrs (mem, &attrs);
2040 /* Set the size of MEM to SIZE. */
2042 void
2043 set_mem_size (rtx mem, HOST_WIDE_INT size)
2045 struct mem_attrs attrs;
2047 attrs = *get_mem_attrs (mem);
2048 attrs.size_known_p = true;
2049 attrs.size = size;
2050 set_mem_attrs (mem, &attrs);
2053 /* Clear the size of MEM. */
2055 void
2056 clear_mem_size (rtx mem)
2058 struct mem_attrs attrs;
2060 attrs = *get_mem_attrs (mem);
2061 attrs.size_known_p = false;
2062 set_mem_attrs (mem, &attrs);
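/* Illustrative usage sketch -- not part of the original file, guarded out.
   A typical caller builds a MEM and then fixes up its attributes with the
   setters above; the helper below and its mode choices are assumptions
   about a generic caller, not GCC code.  */
#if 0
static rtx
example_make_aligned_word_mem (rtx addr)
{
  rtx mem = gen_rtx_MEM (word_mode, addr);

  set_mem_align (mem, BITS_PER_WORD);		/* Alignment is in bits.  */
  set_mem_size (mem, GET_MODE_SIZE (word_mode));
  clear_mem_offset (mem);			/* Offset within the object unknown.  */
  return mem;
}
#endif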
2065 /* Return a memory reference like MEMREF, but with its mode changed to MODE
2066 and its address changed to ADDR. (VOIDmode means don't change the mode.
2067 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
2068 returned memory location is required to be valid. INPLACE is true if any
2069 changes can be made directly to MEMREF or false if MEMREF must be treated
2070 as immutable.
2072 The memory attributes are not changed. */
2074 static rtx
2075 change_address_1 (rtx memref, machine_mode mode, rtx addr, int validate,
2076 bool inplace)
2078 addr_space_t as;
2079 rtx new_rtx;
2081 gcc_assert (MEM_P (memref));
2082 as = MEM_ADDR_SPACE (memref);
2083 if (mode == VOIDmode)
2084 mode = GET_MODE (memref);
2085 if (addr == 0)
2086 addr = XEXP (memref, 0);
2087 if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
2088 && (!validate || memory_address_addr_space_p (mode, addr, as)))
2089 return memref;
2091 /* Don't validate the address for LRA. LRA can make the address valid
2092 by itself in the most efficient way. */
2093 if (validate && !lra_in_progress)
2095 if (reload_in_progress || reload_completed)
2096 gcc_assert (memory_address_addr_space_p (mode, addr, as));
2097 else
2098 addr = memory_address_addr_space (mode, addr, as);
2101 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
2102 return memref;
2104 if (inplace)
2106 XEXP (memref, 0) = addr;
2107 return memref;
2110 new_rtx = gen_rtx_MEM (mode, addr);
2111 MEM_COPY_ATTRIBUTES (new_rtx, memref);
2112 return new_rtx;
2115 /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
2116 way we are changing MEMREF, so we only preserve the alias set. */
2119 change_address (rtx memref, machine_mode mode, rtx addr)
2121 rtx new_rtx = change_address_1 (memref, mode, addr, 1, false);
2122 machine_mode mmode = GET_MODE (new_rtx);
2123 struct mem_attrs attrs, *defattrs;
2125 attrs = *get_mem_attrs (memref);
2126 defattrs = mode_mem_attrs[(int) mmode];
2127 attrs.expr = NULL_TREE;
2128 attrs.offset_known_p = false;
2129 attrs.size_known_p = defattrs->size_known_p;
2130 attrs.size = defattrs->size;
2131 attrs.align = defattrs->align;
2133 /* If there are no changes, just return the original memory reference. */
2134 if (new_rtx == memref)
2136 if (mem_attrs_eq_p (get_mem_attrs (memref), &attrs))
2137 return new_rtx;
2139 new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
2140 MEM_COPY_ATTRIBUTES (new_rtx, memref);
2143 set_mem_attrs (new_rtx, &attrs);
2144 return new_rtx;
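/* Illustrative sketch (hypothetical caller, not from this file): reuse a
   BLKmode slot as a word_mode memory at a new address.  As documented
   above, change_address keeps only the alias set and drops the expr,
   offset and size information.  */
#if 0
static rtx
example_word_view_of_block (rtx blk_mem, rtx new_addr)
{
  /* BLK_MEM is assumed to be a BLKmode MEM and NEW_ADDR a valid address.  */
  return change_address (blk_mem, word_mode, new_addr);
}
#endif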
2147 /* Return a memory reference like MEMREF, but with its mode changed
2148 to MODE and its address offset by OFFSET bytes. If VALIDATE is
2149 nonzero, the memory address is forced to be valid.
2150 If ADJUST_ADDRESS is zero, OFFSET is only used to update MEM_ATTRS
2151 and the caller is responsible for adjusting MEMREF base register.
2152 If ADJUST_OBJECT is zero, the underlying object associated with the
2153 memory reference is left unchanged and the caller is responsible for
2154 dealing with it. Otherwise, if the new memory reference is outside
2155 the underlying object, even partially, then the object is dropped.
2156 SIZE, if nonzero, is the size of an access in cases where MODE
2157 has no inherent size. */
2160 adjust_address_1 (rtx memref, machine_mode mode, HOST_WIDE_INT offset,
2161 int validate, int adjust_address, int adjust_object,
2162 HOST_WIDE_INT size)
2164 rtx addr = XEXP (memref, 0);
2165 rtx new_rtx;
2166 machine_mode address_mode;
2167 int pbits;
2168 struct mem_attrs attrs = *get_mem_attrs (memref), *defattrs;
2169 unsigned HOST_WIDE_INT max_align;
2170 #ifdef POINTERS_EXTEND_UNSIGNED
2171 machine_mode pointer_mode
2172 = targetm.addr_space.pointer_mode (attrs.addrspace);
2173 #endif
2175 /* VOIDmode means no mode change for change_address_1. */
2176 if (mode == VOIDmode)
2177 mode = GET_MODE (memref);
2179 /* Take the size of non-BLKmode accesses from the mode. */
2180 defattrs = mode_mem_attrs[(int) mode];
2181 if (defattrs->size_known_p)
2182 size = defattrs->size;
2184 /* If there are no changes, just return the original memory reference. */
2185 if (mode == GET_MODE (memref) && !offset
2186 && (size == 0 || (attrs.size_known_p && attrs.size == size))
2187 && (!validate || memory_address_addr_space_p (mode, addr,
2188 attrs.addrspace)))
2189 return memref;
2191 /* ??? Prefer to create garbage instead of creating shared rtl.
2192 This may happen even if offset is nonzero -- consider
2193 (plus (plus reg reg) const_int) -- so do this always. */
2194 addr = copy_rtx (addr);
2196 /* Convert a possibly large offset to a signed value within the
2197 range of the target address space. */
2198 address_mode = get_address_mode (memref);
2199 pbits = GET_MODE_BITSIZE (address_mode);
2200 if (HOST_BITS_PER_WIDE_INT > pbits)
2202 int shift = HOST_BITS_PER_WIDE_INT - pbits;
2203 offset = (((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) offset << shift))
2204 >> shift);
2207 if (adjust_address)
2209 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
2210 object, we can merge it into the LO_SUM. */
2211 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
2212 && offset >= 0
2213 && (unsigned HOST_WIDE_INT) offset
2214 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
2215 addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
2216 plus_constant (address_mode,
2217 XEXP (addr, 1), offset));
2218 #ifdef POINTERS_EXTEND_UNSIGNED
2219 /* If MEMREF is a ZERO_EXTEND from pointer_mode and the offset is valid
2220 in that mode, we merge it into the ZERO_EXTEND. We take advantage of
2221 the fact that pointers are not allowed to overflow. */
2222 else if (POINTERS_EXTEND_UNSIGNED > 0
2223 && GET_CODE (addr) == ZERO_EXTEND
2224 && GET_MODE (XEXP (addr, 0)) == pointer_mode
2225 && trunc_int_for_mode (offset, pointer_mode) == offset)
2226 addr = gen_rtx_ZERO_EXTEND (address_mode,
2227 plus_constant (pointer_mode,
2228 XEXP (addr, 0), offset));
2229 #endif
2230 else
2231 addr = plus_constant (address_mode, addr, offset);
2234 new_rtx = change_address_1 (memref, mode, addr, validate, false);
2236 /* If the address is a REG, change_address_1 rightfully returns memref,
2237 but this would destroy memref's MEM_ATTRS. */
2238 if (new_rtx == memref && offset != 0)
2239 new_rtx = copy_rtx (new_rtx);
2241 /* Conservatively drop the object if we don't know where we start from. */
2242 if (adjust_object && (!attrs.offset_known_p || !attrs.size_known_p))
2244 attrs.expr = NULL_TREE;
2245 attrs.alias = 0;
2248 /* Compute the new values of the memory attributes due to this adjustment.
2249 We add the offsets and update the alignment. */
2250 if (attrs.offset_known_p)
2252 attrs.offset += offset;
2254 /* Drop the object if the new left end is not within its bounds. */
2255 if (adjust_object && attrs.offset < 0)
2257 attrs.expr = NULL_TREE;
2258 attrs.alias = 0;
2262 /* Compute the new alignment by taking the MIN of the alignment and the
2263 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
2264 is zero. */
2265 if (offset != 0)
2267 max_align = (offset & -offset) * BITS_PER_UNIT;
2268 attrs.align = MIN (attrs.align, max_align);
2271 if (size)
2273 /* Drop the object if the new right end is not within its bounds. */
2274 if (adjust_object && (offset + size) > attrs.size)
2276 attrs.expr = NULL_TREE;
2277 attrs.alias = 0;
2279 attrs.size_known_p = true;
2280 attrs.size = size;
2282 else if (attrs.size_known_p)
2284 gcc_assert (!adjust_object);
2285 attrs.size -= offset;
2286 /* ??? The store_by_pieces machinery generates negative sizes,
2287 so don't assert for that here. */
2290 set_mem_attrs (new_rtx, &attrs);
2292 return new_rtx;
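/* Illustrative sketch (an assumed caller): access the two words of a
   DImode MEM through the adjust_address wrapper from expr.h, which calls
   adjust_address_1 above with ADJUST_ADDRESS=1 and ADJUST_OBJECT=0.
   Which word is the low part depends on byte order; that is ignored here.  */
#if 0
static void
example_split_double_word (rtx di_mem, rtx *word0, rtx *word1)
{
  *word0 = adjust_address (di_mem, SImode, 0);
  *word1 = adjust_address (di_mem, SImode, GET_MODE_SIZE (SImode));
}
#endif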
2295 /* Return a memory reference like MEMREF, but with its mode changed
2296 to MODE and its address changed to ADDR, which is assumed to be
2297 MEMREF offset by OFFSET bytes. If VALIDATE is
2298 nonzero, the memory address is forced to be valid. */
2301 adjust_automodify_address_1 (rtx memref, machine_mode mode, rtx addr,
2302 HOST_WIDE_INT offset, int validate)
2304 memref = change_address_1 (memref, VOIDmode, addr, validate, false);
2305 return adjust_address_1 (memref, mode, offset, validate, 0, 0, 0);
2308 /* Return a memory reference like MEMREF, but whose address is changed by
2309 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2310 known to be in OFFSET (possibly 1). */
2313 offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
2315 rtx new_rtx, addr = XEXP (memref, 0);
2316 machine_mode address_mode;
2317 struct mem_attrs attrs, *defattrs;
2319 attrs = *get_mem_attrs (memref);
2320 address_mode = get_address_mode (memref);
2321 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2323 /* At this point we don't know _why_ the address is invalid. It
2324 could have secondary memory references, multiplies or anything.
2326 However, if we did go and rearrange things, we can wind up not
2327 being able to recognize the magic around pic_offset_table_rtx.
2328 This stuff is fragile, and is yet another example of why it is
2329 bad to expose PIC machinery too early. */
2330 if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx,
2331 attrs.addrspace)
2332 && GET_CODE (addr) == PLUS
2333 && XEXP (addr, 0) == pic_offset_table_rtx)
2335 addr = force_reg (GET_MODE (addr), addr);
2336 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2339 update_temp_slot_address (XEXP (memref, 0), new_rtx);
2340 new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1, false);
2342 /* If there are no changes, just return the original memory reference. */
2343 if (new_rtx == memref)
2344 return new_rtx;
2346 /* Update the alignment to reflect the offset. Reset the offset, which
2347 we don't know. */
2348 defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
2349 attrs.offset_known_p = false;
2350 attrs.size_known_p = defattrs->size_known_p;
2351 attrs.size = defattrs->size;
2352 attrs.align = MIN (attrs.align, pow2 * BITS_PER_UNIT);
2353 set_mem_attrs (new_rtx, &attrs);
2354 return new_rtx;
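/* Illustrative sketch (hypothetical helper): index a table MEM by a
   register.  POW2 tells offset_address how much alignment survives the
   addition; the 4-byte element size used here is an assumption.  */
#if 0
static rtx
example_indexed_element (rtx table_mem, rtx index_reg)
{
  /* Scale the index by the 4-byte element size, naively.  */
  rtx byte_off = force_operand (gen_rtx_MULT (Pmode, index_reg, GEN_INT (4)),
				NULL_RTX);
  /* 4 is the known power-of-two factor of BYTE_OFF.  */
  return offset_address (table_mem, byte_off, 4);
}
#endif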
2357 /* Return a memory reference like MEMREF, but with its address changed to
2358 ADDR. The caller is asserting that the actual piece of memory pointed
2359 to is the same, just the form of the address is being changed, such as
2360 by putting something into a register. INPLACE is true if any changes
2361 can be made directly to MEMREF or false if MEMREF must be treated as
2362 immutable. */
2365 replace_equiv_address (rtx memref, rtx addr, bool inplace)
2367 /* change_address_1 copies the memory attribute structure without change
2368 and that's exactly what we want here. */
2369 update_temp_slot_address (XEXP (memref, 0), addr);
2370 return change_address_1 (memref, VOIDmode, addr, 1, inplace);
2373 /* Likewise, but the reference is not required to be valid. */
2376 replace_equiv_address_nv (rtx memref, rtx addr, bool inplace)
2378 return change_address_1 (memref, VOIDmode, addr, 0, inplace);
2381 /* Return a memory reference like MEMREF, but with its mode widened to
2382 MODE and offset by OFFSET. This would be used by targets that e.g.
2383 cannot issue QImode memory operations and have to use SImode memory
2384 operations plus masking logic. */
2387 widen_memory_access (rtx memref, machine_mode mode, HOST_WIDE_INT offset)
2389 rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1, 0, 0);
2390 struct mem_attrs attrs;
2391 unsigned int size = GET_MODE_SIZE (mode);
2393 /* If there are no changes, just return the original memory reference. */
2394 if (new_rtx == memref)
2395 return new_rtx;
2397 attrs = *get_mem_attrs (new_rtx);
2399 /* If we don't know what offset we were at within the expression, then
2400 we can't know if we've overstepped the bounds. */
2401 if (! attrs.offset_known_p)
2402 attrs.expr = NULL_TREE;
2404 while (attrs.expr)
2406 if (TREE_CODE (attrs.expr) == COMPONENT_REF)
2408 tree field = TREE_OPERAND (attrs.expr, 1);
2409 tree offset = component_ref_field_offset (attrs.expr);
2411 if (! DECL_SIZE_UNIT (field))
2413 attrs.expr = NULL_TREE;
2414 break;
2417 /* Is the field at least as large as the access? If so, ok,
2418 otherwise strip back to the containing structure. */
2419 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2420 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
2421 && attrs.offset >= 0)
2422 break;
2424 if (! tree_fits_uhwi_p (offset))
2426 attrs.expr = NULL_TREE;
2427 break;
2430 attrs.expr = TREE_OPERAND (attrs.expr, 0);
2431 attrs.offset += tree_to_uhwi (offset);
2432 attrs.offset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
2433 / BITS_PER_UNIT);
2435 /* Similarly for the decl. */
2436 else if (DECL_P (attrs.expr)
2437 && DECL_SIZE_UNIT (attrs.expr)
2438 && TREE_CODE (DECL_SIZE_UNIT (attrs.expr)) == INTEGER_CST
2439 && compare_tree_int (DECL_SIZE_UNIT (attrs.expr), size) >= 0
2440 && (! attrs.offset_known_p || attrs.offset >= 0))
2441 break;
2442 else
2444 /* The widened memory access overflows the expression, which means
2445 that it could alias another expression. Zap it. */
2446 attrs.expr = NULL_TREE;
2447 break;
2451 if (! attrs.expr)
2452 attrs.offset_known_p = false;
2454 /* The widened memory may alias other stuff, so zap the alias set. */
2455 /* ??? Maybe use get_alias_set on any remaining expression. */
2456 attrs.alias = 0;
2457 attrs.size_known_p = true;
2458 attrs.size = size;
2459 set_mem_attrs (new_rtx, &attrs);
2460 return new_rtx;
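/* Illustrative sketch of the use case named in the comment above (a
   hypothetical caller on a generic target): widen a QImode store into an
   SImode read-modify-write.  The masking arithmetic is elided; only the
   widening call is shown.  */
#if 0
static void
example_widen_byte_store (rtx byte_mem, rtx byte_val)
{
  rtx wide_mem = widen_memory_access (byte_mem, SImode, 0);
  rtx old = gen_reg_rtx (SImode);

  emit_move_insn (old, wide_mem);   /* Load the containing word.  */
  /* ... insert BYTE_VAL into OLD here (target-specific masking) ...  */
  emit_move_insn (wide_mem, old);   /* Store the word back.  */
}
#endif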
2463 /* A fake decl that is used as the MEM_EXPR of spill slots. */
2464 static GTY(()) tree spill_slot_decl;
2466 tree
2467 get_spill_slot_decl (bool force_build_p)
2469 tree d = spill_slot_decl;
2470 rtx rd;
2471 struct mem_attrs attrs;
2473 if (d || !force_build_p)
2474 return d;
2476 d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
2477 VAR_DECL, get_identifier ("%sfp"), void_type_node);
2478 DECL_ARTIFICIAL (d) = 1;
2479 DECL_IGNORED_P (d) = 1;
2480 TREE_USED (d) = 1;
2481 spill_slot_decl = d;
2483 rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
2484 MEM_NOTRAP_P (rd) = 1;
2485 attrs = *mode_mem_attrs[(int) BLKmode];
2486 attrs.alias = new_alias_set ();
2487 attrs.expr = d;
2488 set_mem_attrs (rd, &attrs);
2489 SET_DECL_RTL (d, rd);
2491 return d;
2494 /* Given MEM, a result from assign_stack_local, fill in the memory
2495 attributes as appropriate for a register allocator spill slot.
2496 These slots are not aliasable by other memory. We arrange for
2497 them all to use a single MEM_EXPR, so that the aliasing code can
2498 work properly in the case of shared spill slots. */
2500 void
2501 set_mem_attrs_for_spill (rtx mem)
2503 struct mem_attrs attrs;
2504 rtx addr;
2506 attrs = *get_mem_attrs (mem);
2507 attrs.expr = get_spill_slot_decl (true);
2508 attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr));
2509 attrs.addrspace = ADDR_SPACE_GENERIC;
2511 /* We expect the incoming memory to be of the form:
2512 (mem:MODE (plus (reg sfp) (const_int offset)))
2513 with perhaps the plus missing for offset = 0. */
2514 addr = XEXP (mem, 0);
2515 attrs.offset_known_p = true;
2516 attrs.offset = 0;
2517 if (GET_CODE (addr) == PLUS
2518 && CONST_INT_P (XEXP (addr, 1)))
2519 attrs.offset = INTVAL (XEXP (addr, 1));
2521 set_mem_attrs (mem, &attrs);
2522 MEM_NOTRAP_P (mem) = 1;
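/* Illustrative sketch (assumed allocator-side caller): allocate a stack
   slot and mark it as a spill slot so it shares the single MEM_EXPR built
   by get_spill_slot_decl above.  */
#if 0
static rtx
example_new_spill_slot (machine_mode mode)
{
  rtx slot = assign_stack_local (mode, GET_MODE_SIZE (mode),
				 GET_MODE_ALIGNMENT (mode));
  set_mem_attrs_for_spill (slot);
  return slot;
}
#endif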
2525 /* Return a newly created CODE_LABEL rtx with a unique label number. */
2527 rtx_code_label *
2528 gen_label_rtx (void)
2530 return as_a <rtx_code_label *> (
2531 gen_rtx_CODE_LABEL (VOIDmode, NULL_RTX, NULL_RTX,
2532 NULL, label_num++, NULL));
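/* Illustrative sketch (hypothetical expander code): the usual
   create/branch/bind pattern for a fresh CODE_LABEL, assuming the
   emit_cmp_and_jump_insns helper from expr.h.  */
#if 0
static void
example_skip_when_zero (rtx cond_reg)
{
  rtx_code_label *skip = gen_label_rtx ();

  /* Branch to SKIP when COND_REG is zero; -1 means unknown probability.  */
  emit_cmp_and_jump_insns (cond_reg, const0_rtx, EQ, NULL_RTX,
			   GET_MODE (cond_reg), 0, skip, -1);
  /* ... conditionally executed insns would be emitted here ...  */
  emit_label (skip);
}
#endif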
2535 /* For procedure integration. */
2537 /* Install new pointers to the first and last insns in the chain.
2538 Also, set cur_insn_uid to one higher than the last in use.
2539 Used for an inline-procedure after copying the insn chain. */
2541 void
2542 set_new_first_and_last_insn (rtx_insn *first, rtx_insn *last)
2544 rtx_insn *insn;
2546 set_first_insn (first);
2547 set_last_insn (last);
2548 cur_insn_uid = 0;
2550 if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS)
2552 int debug_count = 0;
2554 cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1;
2555 cur_debug_insn_uid = 0;
2557 for (insn = first; insn; insn = NEXT_INSN (insn))
2558 if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID)
2559 cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
2560 else
2562 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2563 if (DEBUG_INSN_P (insn))
2564 debug_count++;
2567 if (debug_count)
2568 cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count;
2569 else
2570 cur_debug_insn_uid++;
2572 else
2573 for (insn = first; insn; insn = NEXT_INSN (insn))
2574 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2576 cur_insn_uid++;
2579 /* Go through all the RTL insn bodies and copy any invalid shared
2580 structure. This routine should only be called once. */
2582 static void
2583 unshare_all_rtl_1 (rtx_insn *insn)
2585 /* Unshare just about everything else. */
2586 unshare_all_rtl_in_chain (insn);
2588 /* Make sure the addresses of stack slots found outside the insn chain
2589 (such as, in DECL_RTL of a variable) are not shared
2590 with the insn chain.
2592 This special care is necessary when the stack slot MEM does not
2593 actually appear in the insn chain. If it does appear, its address
2594 is unshared from all else at that point. */
2595 stack_slot_list = safe_as_a <rtx_expr_list *> (
2596 copy_rtx_if_shared (stack_slot_list));
2599 /* Go through all the RTL insn bodies and copy any invalid shared
2600 structure, again. This is a fairly expensive thing to do so it
2601 should be done sparingly. */
2603 void
2604 unshare_all_rtl_again (rtx_insn *insn)
2606 rtx_insn *p;
2607 tree decl;
2609 for (p = insn; p; p = NEXT_INSN (p))
2610 if (INSN_P (p))
2612 reset_used_flags (PATTERN (p));
2613 reset_used_flags (REG_NOTES (p));
2614 if (CALL_P (p))
2615 reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
2618 /* Make sure that virtual stack slots are not shared. */
2619 set_used_decls (DECL_INITIAL (cfun->decl));
2621 /* Make sure that virtual parameters are not shared. */
2622 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
2623 set_used_flags (DECL_RTL (decl));
2625 reset_used_flags (stack_slot_list);
2627 unshare_all_rtl_1 (insn);
2630 unsigned int
2631 unshare_all_rtl (void)
2633 unshare_all_rtl_1 (get_insns ());
2634 return 0;
2638 /* Check that ORIG is not marked when it should not be and mark ORIG as in use.
2639 Recursively does the same for subexpressions. */
2641 static void
2642 verify_rtx_sharing (rtx orig, rtx insn)
2644 rtx x = orig;
2645 int i;
2646 enum rtx_code code;
2647 const char *format_ptr;
2649 if (x == 0)
2650 return;
2652 code = GET_CODE (x);
2654 /* These types may be freely shared. */
2656 switch (code)
2658 case REG:
2659 case DEBUG_EXPR:
2660 case VALUE:
2661 CASE_CONST_ANY:
2662 case SYMBOL_REF:
2663 case LABEL_REF:
2664 case CODE_LABEL:
2665 case PC:
2666 case CC0:
2667 case RETURN:
2668 case SIMPLE_RETURN:
2669 case SCRATCH:
2670 /* SCRATCHes must be shared because they represent distinct values. */
2671 return;
2672 case CLOBBER:
2673 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
2674 clobbers or clobbers of hard registers that originated as pseudos.
2675 This is needed to allow safe register renaming. */
2676 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2677 && ORIGINAL_REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 0)))
2678 return;
2679 break;
2681 case CONST:
2682 if (shared_const_p (orig))
2683 return;
2684 break;
2686 case MEM:
2687 /* A MEM is allowed to be shared if its address is constant. */
2688 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2689 || reload_completed || reload_in_progress)
2690 return;
2692 break;
2694 default:
2695 break;
2698 /* This rtx may not be shared. If it has already been seen,
2699 replace it with a copy of itself. */
2700 #ifdef ENABLE_CHECKING
2701 if (RTX_FLAG (x, used))
2703 error ("invalid rtl sharing found in the insn");
2704 debug_rtx (insn);
2705 error ("shared rtx");
2706 debug_rtx (x);
2707 internal_error ("internal consistency failure");
2709 #endif
2710 gcc_assert (!RTX_FLAG (x, used));
2712 RTX_FLAG (x, used) = 1;
2714 /* Now scan the subexpressions recursively. */
2716 format_ptr = GET_RTX_FORMAT (code);
2718 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2720 switch (*format_ptr++)
2722 case 'e':
2723 verify_rtx_sharing (XEXP (x, i), insn);
2724 break;
2726 case 'E':
2727 if (XVEC (x, i) != NULL)
2729 int j;
2730 int len = XVECLEN (x, i);
2732 for (j = 0; j < len; j++)
2734 /* We allow sharing of ASM_OPERANDS inside single
2735 instruction. */
2736 if (j && GET_CODE (XVECEXP (x, i, j)) == SET
2737 && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2738 == ASM_OPERANDS))
2739 verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2740 else
2741 verify_rtx_sharing (XVECEXP (x, i, j), insn);
2744 break;
2747 return;
2750 /* Reset used-flags for INSN. */
2752 static void
2753 reset_insn_used_flags (rtx insn)
2755 gcc_assert (INSN_P (insn));
2756 reset_used_flags (PATTERN (insn));
2757 reset_used_flags (REG_NOTES (insn));
2758 if (CALL_P (insn))
2759 reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn));
2762 /* Go through all the RTL insn bodies and clear all the USED bits. */
2764 static void
2765 reset_all_used_flags (void)
2767 rtx_insn *p;
2769 for (p = get_insns (); p; p = NEXT_INSN (p))
2770 if (INSN_P (p))
2772 rtx pat = PATTERN (p);
2773 if (GET_CODE (pat) != SEQUENCE)
2774 reset_insn_used_flags (p);
2775 else
2777 gcc_assert (REG_NOTES (p) == NULL);
2778 for (int i = 0; i < XVECLEN (pat, 0); i++)
2780 rtx insn = XVECEXP (pat, 0, i);
2781 if (INSN_P (insn))
2782 reset_insn_used_flags (insn);
2788 /* Verify sharing in INSN. */
2790 static void
2791 verify_insn_sharing (rtx insn)
2793 gcc_assert (INSN_P (insn));
2794 verify_rtx_sharing (PATTERN (insn), insn);
2795 verify_rtx_sharing (REG_NOTES (insn), insn);
2796 if (CALL_P (insn))
2797 verify_rtx_sharing (CALL_INSN_FUNCTION_USAGE (insn), insn);
2800 /* Go through all the RTL insn bodies and check that there is no unexpected
2801 sharing in between the subexpressions. */
2803 DEBUG_FUNCTION void
2804 verify_rtl_sharing (void)
2806 rtx_insn *p;
2808 timevar_push (TV_VERIFY_RTL_SHARING);
2810 reset_all_used_flags ();
2812 for (p = get_insns (); p; p = NEXT_INSN (p))
2813 if (INSN_P (p))
2815 rtx pat = PATTERN (p);
2816 if (GET_CODE (pat) != SEQUENCE)
2817 verify_insn_sharing (p);
2818 else
2819 for (int i = 0; i < XVECLEN (pat, 0); i++)
2821 rtx insn = XVECEXP (pat, 0, i);
2822 if (INSN_P (insn))
2823 verify_insn_sharing (insn);
2827 reset_all_used_flags ();
2829 timevar_pop (TV_VERIFY_RTL_SHARING);
2832 /* Go through all the RTL insn bodies and copy any invalid shared structure.
2833 Assumes the mark bits are cleared at entry. */
2835 void
2836 unshare_all_rtl_in_chain (rtx_insn *insn)
2838 for (; insn; insn = NEXT_INSN (insn))
2839 if (INSN_P (insn))
2841 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2842 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2843 if (CALL_P (insn))
2844 CALL_INSN_FUNCTION_USAGE (insn)
2845 = copy_rtx_if_shared (CALL_INSN_FUNCTION_USAGE (insn));
2849 /* Go through all virtual stack slots of a function and mark them as
2850 shared. We never replace the DECL_RTLs themselves with a copy,
2851 but expressions mentioned in a DECL_RTL cannot be shared with
2852 expressions in the instruction stream.
2854 Note that reload may convert pseudo registers into memories in-place.
2855 Pseudo registers are always shared, but MEMs never are. Thus if we
2856 reset the used flags on MEMs in the instruction stream, we must set
2857 them again on MEMs that appear in DECL_RTLs. */
2859 static void
2860 set_used_decls (tree blk)
2862 tree t;
2864 /* Mark decls. */
2865 for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
2866 if (DECL_RTL_SET_P (t))
2867 set_used_flags (DECL_RTL (t));
2869 /* Now process sub-blocks. */
2870 for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
2871 set_used_decls (t);
2874 /* Mark ORIG as in use, and return a copy of it if it was already in use.
2875 Recursively does the same for subexpressions. Uses
2876 copy_rtx_if_shared_1 to reduce stack space. */
2879 copy_rtx_if_shared (rtx orig)
2881 copy_rtx_if_shared_1 (&orig);
2882 return orig;
2885 /* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
2886 use. Recursively does the same for subexpressions. */
2888 static void
2889 copy_rtx_if_shared_1 (rtx *orig1)
2891 rtx x;
2892 int i;
2893 enum rtx_code code;
2894 rtx *last_ptr;
2895 const char *format_ptr;
2896 int copied = 0;
2897 int length;
2899 /* Repeat is used to turn tail-recursion into iteration. */
2900 repeat:
2901 x = *orig1;
2903 if (x == 0)
2904 return;
2906 code = GET_CODE (x);
2908 /* These types may be freely shared. */
2910 switch (code)
2912 case REG:
2913 case DEBUG_EXPR:
2914 case VALUE:
2915 CASE_CONST_ANY:
2916 case SYMBOL_REF:
2917 case LABEL_REF:
2918 case CODE_LABEL:
2919 case PC:
2920 case CC0:
2921 case RETURN:
2922 case SIMPLE_RETURN:
2923 case SCRATCH:
2924 /* SCRATCHes must be shared because they represent distinct values. */
2925 return;
2926 case CLOBBER:
2927 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
2928 clobbers or clobbers of hard registers that originated as pseudos.
2929 This is needed to allow safe register renaming. */
2930 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2931 && ORIGINAL_REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 0)))
2932 return;
2933 break;
2935 case CONST:
2936 if (shared_const_p (x))
2937 return;
2938 break;
2940 case DEBUG_INSN:
2941 case INSN:
2942 case JUMP_INSN:
2943 case CALL_INSN:
2944 case NOTE:
2945 case BARRIER:
2946 /* The chain of insns is not being copied. */
2947 return;
2949 default:
2950 break;
2953 /* This rtx may not be shared. If it has already been seen,
2954 replace it with a copy of itself. */
2956 if (RTX_FLAG (x, used))
2958 x = shallow_copy_rtx (x);
2959 copied = 1;
2961 RTX_FLAG (x, used) = 1;
2963 /* Now scan the subexpressions recursively.
2964 We can store any replaced subexpressions directly into X
2965 since we know X is not shared! Any vectors in X
2966 must be copied if X was copied. */
2968 format_ptr = GET_RTX_FORMAT (code);
2969 length = GET_RTX_LENGTH (code);
2970 last_ptr = NULL;
2972 for (i = 0; i < length; i++)
2974 switch (*format_ptr++)
2976 case 'e':
2977 if (last_ptr)
2978 copy_rtx_if_shared_1 (last_ptr);
2979 last_ptr = &XEXP (x, i);
2980 break;
2982 case 'E':
2983 if (XVEC (x, i) != NULL)
2985 int j;
2986 int len = XVECLEN (x, i);
2988 /* Copy the vector iff I copied the rtx and the length
2989 is nonzero. */
2990 if (copied && len > 0)
2991 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
2993 /* Call recursively on all inside the vector. */
2994 for (j = 0; j < len; j++)
2996 if (last_ptr)
2997 copy_rtx_if_shared_1 (last_ptr);
2998 last_ptr = &XVECEXP (x, i, j);
3001 break;
3004 *orig1 = x;
3005 if (last_ptr)
3007 orig1 = last_ptr;
3008 goto repeat;
3010 return;
3013 /* Set the USED bit in X and its non-shareable subparts to FLAG. */
3015 static void
3016 mark_used_flags (rtx x, int flag)
3018 int i, j;
3019 enum rtx_code code;
3020 const char *format_ptr;
3021 int length;
3023 /* Repeat is used to turn tail-recursion into iteration. */
3024 repeat:
3025 if (x == 0)
3026 return;
3028 code = GET_CODE (x);
3030 /* These types may be freely shared so we needn't do any resetting
3031 for them. */
3033 switch (code)
3035 case REG:
3036 case DEBUG_EXPR:
3037 case VALUE:
3038 CASE_CONST_ANY:
3039 case SYMBOL_REF:
3040 case CODE_LABEL:
3041 case PC:
3042 case CC0:
3043 case RETURN:
3044 case SIMPLE_RETURN:
3045 return;
3047 case DEBUG_INSN:
3048 case INSN:
3049 case JUMP_INSN:
3050 case CALL_INSN:
3051 case NOTE:
3052 case LABEL_REF:
3053 case BARRIER:
3054 /* The chain of insns is not being copied. */
3055 return;
3057 default:
3058 break;
3061 RTX_FLAG (x, used) = flag;
3063 format_ptr = GET_RTX_FORMAT (code);
3064 length = GET_RTX_LENGTH (code);
3066 for (i = 0; i < length; i++)
3068 switch (*format_ptr++)
3070 case 'e':
3071 if (i == length-1)
3073 x = XEXP (x, i);
3074 goto repeat;
3076 mark_used_flags (XEXP (x, i), flag);
3077 break;
3079 case 'E':
3080 for (j = 0; j < XVECLEN (x, i); j++)
3081 mark_used_flags (XVECEXP (x, i, j), flag);
3082 break;
3087 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
3088 to look for shared sub-parts. */
3090 void
3091 reset_used_flags (rtx x)
3093 mark_used_flags (x, 0);
3096 /* Set all the USED bits in X to allow copy_rtx_if_shared to be used
3097 to look for shared sub-parts. */
3099 void
3100 set_used_flags (rtx x)
3102 mark_used_flags (x, 1);
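/* Illustrative sketch of the protocol the routines above implement (an
   assumed caller, mirroring unshare_all_rtl_again): clear the used bits
   first, then let copy_rtx_if_shared duplicate anything reached twice.  */
#if 0
static void
example_unshare_pattern (rtx_insn *insn)
{
  reset_used_flags (PATTERN (insn));
  PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
}
#endif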
3105 /* Copy X if necessary so that it won't be altered by changes in OTHER.
3106 Return X or the rtx for the pseudo reg the value of X was copied into.
3107 OTHER must be valid as a SET_DEST. */
3110 make_safe_from (rtx x, rtx other)
3112 while (1)
3113 switch (GET_CODE (other))
3115 case SUBREG:
3116 other = SUBREG_REG (other);
3117 break;
3118 case STRICT_LOW_PART:
3119 case SIGN_EXTEND:
3120 case ZERO_EXTEND:
3121 other = XEXP (other, 0);
3122 break;
3123 default:
3124 goto done;
3126 done:
3127 if ((MEM_P (other)
3128 && ! CONSTANT_P (x)
3129 && !REG_P (x)
3130 && GET_CODE (x) != SUBREG)
3131 || (REG_P (other)
3132 && (REGNO (other) < FIRST_PSEUDO_REGISTER
3133 || reg_mentioned_p (other, x))))
3135 rtx temp = gen_reg_rtx (GET_MODE (x));
3136 emit_move_insn (temp, x);
3137 return temp;
3139 return x;
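/* Illustrative sketch (hypothetical expander): protect a value that must
   survive a store to OTHER before OTHER is overwritten.  */
#if 0
static rtx
example_store_then_use (rtx x, rtx other, rtx new_val)
{
  x = make_safe_from (x, other);	/* Maybe copies X into a fresh pseudo.  */
  emit_move_insn (other, new_val);	/* OTHER can now be clobbered...  */
  return x;				/* ... while X stays usable.  */
}
#endif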
3142 /* Emission of insns (adding them to the doubly-linked list). */
3144 /* Return the last insn emitted, even if it is in a sequence now pushed. */
3146 rtx_insn *
3147 get_last_insn_anywhere (void)
3149 struct sequence_stack *stack;
3150 if (get_last_insn ())
3151 return get_last_insn ();
3152 for (stack = seq_stack; stack; stack = stack->next)
3153 if (stack->last != 0)
3154 return stack->last;
3155 return 0;
3158 /* Return the first nonnote insn emitted in current sequence or current
3159 function. This routine looks inside SEQUENCEs. */
3161 rtx_insn *
3162 get_first_nonnote_insn (void)
3164 rtx_insn *insn = get_insns ();
3166 if (insn)
3168 if (NOTE_P (insn))
3169 for (insn = next_insn (insn);
3170 insn && NOTE_P (insn);
3171 insn = next_insn (insn))
3172 continue;
3173 else
3175 if (NONJUMP_INSN_P (insn)
3176 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3177 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3181 return insn;
3184 /* Return the last nonnote insn emitted in current sequence or current
3185 function. This routine looks inside SEQUENCEs. */
3187 rtx_insn *
3188 get_last_nonnote_insn (void)
3190 rtx_insn *insn = get_last_insn ();
3192 if (insn)
3194 if (NOTE_P (insn))
3195 for (insn = previous_insn (insn);
3196 insn && NOTE_P (insn);
3197 insn = previous_insn (insn))
3198 continue;
3199 else
3201 if (NONJUMP_INSN_P (insn))
3202 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
3203 insn = seq->insn (seq->len () - 1);
3207 return insn;
3210 /* Return the number of actual (non-debug) insns emitted in this
3211 function. */
3214 get_max_insn_count (void)
3216 int n = cur_insn_uid;
3218 /* The table size must be stable across -g, to avoid codegen
3219 differences due to debug insns, and not be affected by
3220 -fmin-insn-uid, to avoid excessive table size and to simplify
3221 debugging of -fcompare-debug failures. */
3222 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3223 n -= cur_debug_insn_uid;
3224 else
3225 n -= MIN_NONDEBUG_INSN_UID;
3227 return n;
3231 /* Return the next insn. If it is a SEQUENCE, return the first insn
3232 of the sequence. */
3234 rtx_insn *
3235 next_insn (rtx_insn *insn)
3237 if (insn)
3239 insn = NEXT_INSN (insn);
3240 if (insn && NONJUMP_INSN_P (insn)
3241 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3242 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3245 return insn;
3248 /* Return the previous insn. If it is a SEQUENCE, return the last insn
3249 of the sequence. */
3251 rtx_insn *
3252 previous_insn (rtx_insn *insn)
3254 if (insn)
3256 insn = PREV_INSN (insn);
3257 if (insn && NONJUMP_INSN_P (insn))
3258 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
3259 insn = seq->insn (seq->len () - 1);
3262 return insn;
3265 /* Return the next insn after INSN that is not a NOTE. This routine does not
3266 look inside SEQUENCEs. */
3268 rtx_insn *
3269 next_nonnote_insn (rtx uncast_insn)
3271 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3272 while (insn)
3274 insn = NEXT_INSN (insn);
3275 if (insn == 0 || !NOTE_P (insn))
3276 break;
3279 return insn;
3282 /* Return the next insn after INSN that is not a NOTE, but stop the
3283 search before we enter another basic block. This routine does not
3284 look inside SEQUENCEs. */
3286 rtx_insn *
3287 next_nonnote_insn_bb (rtx_insn *insn)
3289 while (insn)
3291 insn = NEXT_INSN (insn);
3292 if (insn == 0 || !NOTE_P (insn))
3293 break;
3294 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3295 return NULL;
3298 return insn;
3301 /* Return the previous insn before INSN that is not a NOTE. This routine does
3302 not look inside SEQUENCEs. */
3304 rtx_insn *
3305 prev_nonnote_insn (rtx uncast_insn)
3307 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3309 while (insn)
3311 insn = PREV_INSN (insn);
3312 if (insn == 0 || !NOTE_P (insn))
3313 break;
3316 return insn;
3319 /* Return the previous insn before INSN that is not a NOTE, but stop
3320 the search before we enter another basic block. This routine does
3321 not look inside SEQUENCEs. */
3323 rtx_insn *
3324 prev_nonnote_insn_bb (rtx uncast_insn)
3326 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3328 while (insn)
3330 insn = PREV_INSN (insn);
3331 if (insn == 0 || !NOTE_P (insn))
3332 break;
3333 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3334 return NULL;
3337 return insn;
3340 /* Return the next insn after INSN that is not a DEBUG_INSN. This
3341 routine does not look inside SEQUENCEs. */
3343 rtx_insn *
3344 next_nondebug_insn (rtx uncast_insn)
3346 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3348 while (insn)
3350 insn = NEXT_INSN (insn);
3351 if (insn == 0 || !DEBUG_INSN_P (insn))
3352 break;
3355 return insn;
3358 /* Return the previous insn before INSN that is not a DEBUG_INSN.
3359 This routine does not look inside SEQUENCEs. */
3361 rtx_insn *
3362 prev_nondebug_insn (rtx uncast_insn)
3364 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3366 while (insn)
3368 insn = PREV_INSN (insn);
3369 if (insn == 0 || !DEBUG_INSN_P (insn))
3370 break;
3373 return insn;
3376 /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
3377 This routine does not look inside SEQUENCEs. */
3379 rtx_insn *
3380 next_nonnote_nondebug_insn (rtx uncast_insn)
3382 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3384 while (insn)
3386 insn = NEXT_INSN (insn);
3387 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3388 break;
3391 return insn;
3394 /* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
3395 This routine does not look inside SEQUENCEs. */
3397 rtx_insn *
3398 prev_nonnote_nondebug_insn (rtx uncast_insn)
3400 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3402 while (insn)
3404 insn = PREV_INSN (insn);
3405 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3406 break;
3409 return insn;
3412 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3413 or 0, if there is none. This routine does not look inside
3414 SEQUENCEs. */
3416 rtx_insn *
3417 next_real_insn (rtx uncast_insn)
3419 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3421 while (insn)
3423 insn = NEXT_INSN (insn);
3424 if (insn == 0 || INSN_P (insn))
3425 break;
3428 return insn;
3431 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3432 or 0, if there is none. This routine does not look inside
3433 SEQUENCEs. */
3435 rtx_insn *
3436 prev_real_insn (rtx uncast_insn)
3438 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3440 while (insn)
3442 insn = PREV_INSN (insn);
3443 if (insn == 0 || INSN_P (insn))
3444 break;
3447 return insn;
3450 /* Return the last CALL_INSN in the current list, or 0 if there is none.
3451 This routine does not look inside SEQUENCEs. */
3453 rtx_call_insn *
3454 last_call_insn (void)
3456 rtx_insn *insn;
3458 for (insn = get_last_insn ();
3459 insn && !CALL_P (insn);
3460 insn = PREV_INSN (insn))
3463 return safe_as_a <rtx_call_insn *> (insn);
3466 /* Find the next insn after INSN that really does something. This routine
3467 does not look inside SEQUENCEs. After reload this also skips over
3468 standalone USE and CLOBBER insns. */
3471 active_insn_p (const_rtx insn)
3473 return (CALL_P (insn) || JUMP_P (insn)
3474 || JUMP_TABLE_DATA_P (insn) /* FIXME */
3475 || (NONJUMP_INSN_P (insn)
3476 && (! reload_completed
3477 || (GET_CODE (PATTERN (insn)) != USE
3478 && GET_CODE (PATTERN (insn)) != CLOBBER))));
3481 rtx_insn *
3482 next_active_insn (rtx uncast_insn)
3484 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3486 while (insn)
3488 insn = NEXT_INSN (insn);
3489 if (insn == 0 || active_insn_p (insn))
3490 break;
3493 return insn;
3496 /* Find the last insn before INSN that really does something. This routine
3497 does not look inside SEQUENCEs. After reload this also skips over
3498 standalone USE and CLOBBER insns. */
3500 rtx_insn *
3501 prev_active_insn (rtx uncast_insn)
3503 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3505 while (insn)
3507 insn = PREV_INSN (insn);
3508 if (insn == 0 || active_insn_p (insn))
3509 break;
3512 return insn;
3515 #ifdef HAVE_cc0
3516 /* Return the next insn that uses CC0 after INSN, which is assumed to
3517 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3518 applied to the result of this function should yield INSN).
3520 Normally, this is simply the next insn. However, if a REG_CC_USER note
3521 is present, it contains the insn that uses CC0.
3523 Return 0 if we can't find the insn. */
3525 rtx_insn *
3526 next_cc0_user (rtx uncast_insn)
3528 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3530 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3532 if (note)
3533 return safe_as_a <rtx_insn *> (XEXP (note, 0));
3535 insn = next_nonnote_insn (insn);
3536 if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3537 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3539 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3540 return insn;
3542 return 0;
3545 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3546 note, it is the previous insn. */
3548 rtx_insn *
3549 prev_cc0_setter (rtx uncast_insn)
3551 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3553 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3555 if (note)
3556 return safe_as_a <rtx_insn *> (XEXP (note, 0));
3558 insn = prev_nonnote_insn (insn);
3559 gcc_assert (sets_cc0_p (PATTERN (insn)));
3561 return insn;
3563 #endif
3565 #ifdef AUTO_INC_DEC
3566 /* Return true if X contains an RTX_AUTOINC class rtx whose operand is REG. */
3568 static int
3569 find_auto_inc (const_rtx x, const_rtx reg)
3571 subrtx_iterator::array_type array;
3572 FOR_EACH_SUBRTX (iter, array, x, NONCONST)
3574 const_rtx x = *iter;
3575 if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC
3576 && rtx_equal_p (reg, XEXP (x, 0)))
3577 return true;
3579 return false;
3581 #endif
3583 /* Increment the label uses for all labels present in rtx. */
3585 static void
3586 mark_label_nuses (rtx x)
3588 enum rtx_code code;
3589 int i, j;
3590 const char *fmt;
3592 code = GET_CODE (x);
3593 if (code == LABEL_REF && LABEL_P (LABEL_REF_LABEL (x)))
3594 LABEL_NUSES (LABEL_REF_LABEL (x))++;
3596 fmt = GET_RTX_FORMAT (code);
3597 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3599 if (fmt[i] == 'e')
3600 mark_label_nuses (XEXP (x, i));
3601 else if (fmt[i] == 'E')
3602 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3603 mark_label_nuses (XVECEXP (x, i, j));
3608 /* Try splitting insns that can be split for better scheduling.
3609 PAT is the pattern which might split.
3610 TRIAL is the insn providing PAT.
3611 LAST is nonzero if we should return the last insn of the sequence produced.
3613 If this routine succeeds in splitting, it returns the first or last
3614 replacement insn depending on the value of LAST. Otherwise, it
3615 returns TRIAL. If the insn to be returned can be split, it will be. */
3617 rtx_insn *
3618 try_split (rtx pat, rtx uncast_trial, int last)
3620 rtx_insn *trial = as_a <rtx_insn *> (uncast_trial);
3621 rtx_insn *before = PREV_INSN (trial);
3622 rtx_insn *after = NEXT_INSN (trial);
3623 rtx note;
3624 rtx_insn *seq, *tem;
3625 int probability;
3626 rtx_insn *insn_last, *insn;
3627 int njumps = 0;
3628 rtx call_insn = NULL_RTX;
3630 /* We're not good at redistributing frame information. */
3631 if (RTX_FRAME_RELATED_P (trial))
3632 return trial;
3634 if (any_condjump_p (trial)
3635 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3636 split_branch_probability = XINT (note, 0);
3637 probability = split_branch_probability;
3639 seq = safe_as_a <rtx_insn *> (split_insns (pat, trial));
3641 split_branch_probability = -1;
3643 if (!seq)
3644 return trial;
3646 /* Avoid infinite loop if any insn of the result matches
3647 the original pattern. */
3648 insn_last = seq;
3649 while (1)
3651 if (INSN_P (insn_last)
3652 && rtx_equal_p (PATTERN (insn_last), pat))
3653 return trial;
3654 if (!NEXT_INSN (insn_last))
3655 break;
3656 insn_last = NEXT_INSN (insn_last);
3659 /* We will be adding the new sequence to the function. The splitters
3660 may have introduced invalid RTL sharing, so unshare the sequence now. */
3661 unshare_all_rtl_in_chain (seq);
3663 /* Mark labels and copy flags. */
3664 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3666 if (JUMP_P (insn))
3668 if (JUMP_P (trial))
3669 CROSSING_JUMP_P (insn) = CROSSING_JUMP_P (trial);
3670 mark_jump_label (PATTERN (insn), insn, 0);
3671 njumps++;
3672 if (probability != -1
3673 && any_condjump_p (insn)
3674 && !find_reg_note (insn, REG_BR_PROB, 0))
3676 /* We can preserve the REG_BR_PROB notes only if exactly
3677 one jump is created, otherwise the machine description
3678 is responsible for this step using the
3679 split_branch_probability variable. */
3680 gcc_assert (njumps == 1);
3681 add_int_reg_note (insn, REG_BR_PROB, probability);
3686 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3687 in SEQ and copy any additional information across. */
3688 if (CALL_P (trial))
3690 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3691 if (CALL_P (insn))
3693 rtx_insn *next;
3694 rtx *p;
3696 gcc_assert (call_insn == NULL_RTX);
3697 call_insn = insn;
3699 /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the
3700 target may have explicitly specified. */
3701 p = &CALL_INSN_FUNCTION_USAGE (insn);
3702 while (*p)
3703 p = &XEXP (*p, 1);
3704 *p = CALL_INSN_FUNCTION_USAGE (trial);
3706 /* If the old call was a sibling call, the new one must
3707 be too. */
3708 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
3710 /* If the new call is the last instruction in the sequence,
3711 it will effectively replace the old call in-situ. Otherwise
3712 we must move any following NOTE_INSN_CALL_ARG_LOCATION note
3713 so that it comes immediately after the new call. */
3714 if (NEXT_INSN (insn))
3715 for (next = NEXT_INSN (trial);
3716 next && NOTE_P (next);
3717 next = NEXT_INSN (next))
3718 if (NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION)
3720 remove_insn (next);
3721 add_insn_after (next, insn, NULL);
3722 break;
3727 /* Copy notes, particularly those related to the CFG. */
3728 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3730 switch (REG_NOTE_KIND (note))
3732 case REG_EH_REGION:
3733 copy_reg_eh_region_note_backward (note, insn_last, NULL);
3734 break;
3736 case REG_NORETURN:
3737 case REG_SETJMP:
3738 case REG_TM:
3739 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3741 if (CALL_P (insn))
3742 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3744 break;
3746 case REG_NON_LOCAL_GOTO:
3747 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3749 if (JUMP_P (insn))
3750 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3752 break;
3754 #ifdef AUTO_INC_DEC
3755 case REG_INC:
3756 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3758 rtx reg = XEXP (note, 0);
3759 if (!FIND_REG_INC_NOTE (insn, reg)
3760 && find_auto_inc (PATTERN (insn), reg))
3761 add_reg_note (insn, REG_INC, reg);
3763 break;
3764 #endif
3766 case REG_ARGS_SIZE:
3767 fixup_args_size_notes (NULL, insn_last, INTVAL (XEXP (note, 0)));
3768 break;
3770 case REG_CALL_DECL:
3771 gcc_assert (call_insn != NULL_RTX);
3772 add_reg_note (call_insn, REG_NOTE_KIND (note), XEXP (note, 0));
3773 break;
3775 default:
3776 break;
3780 /* If there are LABELS inside the split insns increment the
3781 usage count so we don't delete the label. */
3782 if (INSN_P (trial))
3784 insn = insn_last;
3785 while (insn != NULL_RTX)
3787 /* JUMP_P insns have already been "marked" above. */
3788 if (NONJUMP_INSN_P (insn))
3789 mark_label_nuses (PATTERN (insn));
3791 insn = PREV_INSN (insn);
3795 tem = emit_insn_after_setloc (seq, trial, INSN_LOCATION (trial));
3797 delete_insn (trial);
3799 /* Recursively call try_split for each new insn created; by the
3800 time control returns here that insn will be fully split, so
3801 set LAST and continue from the insn after the one returned.
3802 We can't use next_active_insn here since AFTER may be a note.
3803 Ignore deleted insns, which can occur if not optimizing. */
3804 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3805 if (! tem->deleted () && INSN_P (tem))
3806 tem = try_split (PATTERN (tem), tem, 1);
3808 /* Return either the first or the last insn, depending on which was
3809 requested. */
3810 return last
3811 ? (after ? PREV_INSN (after) : get_last_insn ())
3812 : NEXT_INSN (before);
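/* Illustrative sketch (an assumption about a generic caller such as the
   insn-splitting passes): try_split is handed the insn's own pattern and
   replaces the insn in place when the machine description provides a
   matching define_split.  */
#if 0
static rtx_insn *
example_split_one_insn (rtx_insn *insn)
{
  /* Returns the last insn of the split sequence, or INSN unchanged if no
     splitter applied.  */
  return try_split (PATTERN (insn), insn, 1);
}
#endif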
3815 /* Make and return an INSN rtx, initializing all its slots.
3816 Store PATTERN in the pattern slots. */
3818 rtx_insn *
3819 make_insn_raw (rtx pattern)
3821 rtx_insn *insn;
3823 insn = as_a <rtx_insn *> (rtx_alloc (INSN));
3825 INSN_UID (insn) = cur_insn_uid++;
3826 PATTERN (insn) = pattern;
3827 INSN_CODE (insn) = -1;
3828 REG_NOTES (insn) = NULL;
3829 INSN_LOCATION (insn) = curr_insn_location ();
3830 BLOCK_FOR_INSN (insn) = NULL;
3832 #ifdef ENABLE_RTL_CHECKING
3833 if (insn
3834 && INSN_P (insn)
3835 && (returnjump_p (insn)
3836 || (GET_CODE (insn) == SET
3837 && SET_DEST (insn) == pc_rtx)))
3839 warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
3840 debug_rtx (insn);
3842 #endif
3844 return insn;
3847 /* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn. */
3849 static rtx_insn *
3850 make_debug_insn_raw (rtx pattern)
3852 rtx_debug_insn *insn;
3854 insn = as_a <rtx_debug_insn *> (rtx_alloc (DEBUG_INSN));
3855 INSN_UID (insn) = cur_debug_insn_uid++;
3856 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3857 INSN_UID (insn) = cur_insn_uid++;
3859 PATTERN (insn) = pattern;
3860 INSN_CODE (insn) = -1;
3861 REG_NOTES (insn) = NULL;
3862 INSN_LOCATION (insn) = curr_insn_location ();
3863 BLOCK_FOR_INSN (insn) = NULL;
3865 return insn;
3868 /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
3870 static rtx_insn *
3871 make_jump_insn_raw (rtx pattern)
3873 rtx_jump_insn *insn;
3875 insn = as_a <rtx_jump_insn *> (rtx_alloc (JUMP_INSN));
3876 INSN_UID (insn) = cur_insn_uid++;
3878 PATTERN (insn) = pattern;
3879 INSN_CODE (insn) = -1;
3880 REG_NOTES (insn) = NULL;
3881 JUMP_LABEL (insn) = NULL;
3882 INSN_LOCATION (insn) = curr_insn_location ();
3883 BLOCK_FOR_INSN (insn) = NULL;
3885 return insn;
3888 /* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
3890 static rtx_insn *
3891 make_call_insn_raw (rtx pattern)
3893 rtx_call_insn *insn;
3895 insn = as_a <rtx_call_insn *> (rtx_alloc (CALL_INSN));
3896 INSN_UID (insn) = cur_insn_uid++;
3898 PATTERN (insn) = pattern;
3899 INSN_CODE (insn) = -1;
3900 REG_NOTES (insn) = NULL;
3901 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
3902 INSN_LOCATION (insn) = curr_insn_location ();
3903 BLOCK_FOR_INSN (insn) = NULL;
3905 return insn;
3908 /* Like `make_insn_raw' but make a NOTE instead of an insn. */
3910 static rtx_note *
3911 make_note_raw (enum insn_note subtype)
3913 /* Some notes are never created this way at all. These notes are
3914 only created by patching out insns. */
3915 gcc_assert (subtype != NOTE_INSN_DELETED_LABEL
3916 && subtype != NOTE_INSN_DELETED_DEBUG_LABEL);
3918 rtx_note *note = as_a <rtx_note *> (rtx_alloc (NOTE));
3919 INSN_UID (note) = cur_insn_uid++;
3920 NOTE_KIND (note) = subtype;
3921 BLOCK_FOR_INSN (note) = NULL;
3922 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
3923 return note;
3926 /* Add INSN to the end of the doubly-linked list, between PREV and NEXT.
3927 INSN may be any object that can appear in the chain: INSN_P and NOTE_P objects,
3928 but also BARRIERs and JUMP_TABLE_DATAs. PREV and NEXT may be NULL. */
3930 static inline void
3931 link_insn_into_chain (rtx_insn *insn, rtx_insn *prev, rtx_insn *next)
3933 SET_PREV_INSN (insn) = prev;
3934 SET_NEXT_INSN (insn) = next;
3935 if (prev != NULL)
3937 SET_NEXT_INSN (prev) = insn;
3938 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3940 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
3941 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = insn;
3944 if (next != NULL)
3946 SET_PREV_INSN (next) = insn;
3947 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3949 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
3950 SET_PREV_INSN (sequence->insn (0)) = insn;
3954 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3956 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (insn));
3957 SET_PREV_INSN (sequence->insn (0)) = prev;
3958 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
3962 /* Add INSN to the end of the doubly-linked list.
3963 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
3965 void
3966 add_insn (rtx_insn *insn)
3968 rtx_insn *prev = get_last_insn ();
3969 link_insn_into_chain (insn, prev, NULL);
3970 if (NULL == get_insns ())
3971 set_first_insn (insn);
3972 set_last_insn (insn);
3975 /* Add INSN into the doubly-linked list after insn AFTER. */
3977 static void
3978 add_insn_after_nobb (rtx_insn *insn, rtx_insn *after)
3980 rtx_insn *next = NEXT_INSN (after);
3982 gcc_assert (!optimize || !after->deleted ());
3984 link_insn_into_chain (insn, after, next);
3986 if (next == NULL)
3988 if (get_last_insn () == after)
3989 set_last_insn (insn);
3990 else
3992 struct sequence_stack *stack = seq_stack;
3993 /* Scan all pending sequences too. */
3994 for (; stack; stack = stack->next)
3995 if (after == stack->last)
3997 stack->last = insn;
3998 break;
4004 /* Add INSN into the doubly-linked list before insn BEFORE. */
4006 static void
4007 add_insn_before_nobb (rtx_insn *insn, rtx_insn *before)
4009 rtx_insn *prev = PREV_INSN (before);
4011 gcc_assert (!optimize || !before->deleted ());
4013 link_insn_into_chain (insn, prev, before);
4015 if (prev == NULL)
4017 if (get_insns () == before)
4018 set_first_insn (insn);
4019 else
4021 struct sequence_stack *stack = seq_stack;
4022 /* Scan all pending sequences too. */
4023 for (; stack; stack = stack->next)
4024 if (before == stack->first)
4026 stack->first = insn;
4027 break;
4030 gcc_assert (stack);
4035 /* Like add_insn_after_nobb, but try to set BLOCK_FOR_INSN.
4036 If BB is NULL, an attempt is made to infer the bb from AFTER.
4038 This and the next function should be the only functions called
4039 to insert an insn once delay slots have been filled since only
4040 they know how to update a SEQUENCE. */
4042 void
4043 add_insn_after (rtx uncast_insn, rtx uncast_after, basic_block bb)
4045 rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
4046 rtx_insn *after = as_a <rtx_insn *> (uncast_after);
4047 add_insn_after_nobb (insn, after);
4048 if (!BARRIER_P (after)
4049 && !BARRIER_P (insn)
4050 && (bb = BLOCK_FOR_INSN (after)))
4052 set_block_for_insn (insn, bb);
4053 if (INSN_P (insn))
4054 df_insn_rescan (insn);
4055 /* This should not happen, as the first insn in the BB is always
4056 either a NOTE or a LABEL. */
4057 if (BB_END (bb) == after
4058 /* Avoid clobbering of structure when creating new BB. */
4059 && !BARRIER_P (insn)
4060 && !NOTE_INSN_BASIC_BLOCK_P (insn))
4061 BB_END (bb) = insn;
4065 /* Like add_insn_before_nobb, but try to set BLOCK_FOR_INSN.
4066 If BB is NULL, an attempt is made to infer the bb from before.
4068 This and the previous function should be the only functions called
4069 to insert an insn once delay slots have been filled since only
4070 they know how to update a SEQUENCE. */
4072 void
4073 add_insn_before (rtx uncast_insn, rtx uncast_before, basic_block bb)
4075 rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
4076 rtx_insn *before = as_a <rtx_insn *> (uncast_before);
4077 add_insn_before_nobb (insn, before);
4079 if (!bb
4080 && !BARRIER_P (before)
4081 && !BARRIER_P (insn))
4082 bb = BLOCK_FOR_INSN (before);
4084 if (bb)
4086 set_block_for_insn (insn, bb);
4087 if (INSN_P (insn))
4088 df_insn_rescan (insn);
4089 /* This should not happen, as the first insn in the BB is always either a
4090 NOTE or a LABEL. */
4091 gcc_assert (BB_HEAD (bb) != insn
4092 /* Avoid clobbering of structure when creating new BB. */
4093 || BARRIER_P (insn)
4094 || NOTE_INSN_BASIC_BLOCK_P (insn));
4098 /* Replace INSN with a deleted instruction note. */
4100 void
4101 set_insn_deleted (rtx insn)
4103 if (INSN_P (insn))
4104 df_insn_delete (as_a <rtx_insn *> (insn));
4105 PUT_CODE (insn, NOTE);
4106 NOTE_KIND (insn) = NOTE_INSN_DELETED;
4110 /* Unlink INSN from the insn chain.
4112 This function knows how to handle sequences.
4114 This function does not invalidate data flow information associated with
4115 INSN (i.e. does not call df_insn_delete). That makes this function
4116 usable for only disconnecting an insn from the chain, and re-emit it
4117 elsewhere later.
4119 To later insert INSN elsewhere in the insn chain via add_insn and
4120 similar functions, PREV_INSN and NEXT_INSN must be nullified by
4121 the caller. Nullifying them here would break many insn chain walks.
4123 To really delete an insn and related DF information, use delete_insn. */
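/* An illustrative sketch of the disconnect-and-re-emit pattern described
   above (not from the original sources; OTHER is a hypothetical insn after
   which INSN should reappear):

     remove_insn (insn);
     SET_PREV_INSN (insn) = NULL;
     SET_NEXT_INSN (insn) = NULL;
     add_insn_after (insn, other, NULL);  */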
4125 void
4126 remove_insn (rtx uncast_insn)
4128 rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
4129 rtx_insn *next = NEXT_INSN (insn);
4130 rtx_insn *prev = PREV_INSN (insn);
4131 basic_block bb;
4133 if (prev)
4135 SET_NEXT_INSN (prev) = next;
4136 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
4138 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
4139 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
4142 else if (get_insns () == insn)
4144 if (next)
4145 SET_PREV_INSN (next) = NULL;
4146 set_first_insn (next);
4148 else
4150 struct sequence_stack *stack = seq_stack;
4151 /* Scan all pending sequences too. */
4152 for (; stack; stack = stack->next)
4153 if (insn == stack->first)
4155 stack->first = next;
4156 break;
4159 gcc_assert (stack);
4162 if (next)
4164 SET_PREV_INSN (next) = prev;
4165 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
4167 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
4168 SET_PREV_INSN (sequence->insn (0)) = prev;
4171 else if (get_last_insn () == insn)
4172 set_last_insn (prev);
4173 else
4175 struct sequence_stack *stack = seq_stack;
4176 /* Scan all pending sequences too. */
4177 for (; stack; stack = stack->next)
4178 if (insn == stack->last)
4180 stack->last = prev;
4181 break;
4184 gcc_assert (stack);
4187 /* Fix up basic block boundaries, if necessary. */
4188 if (!BARRIER_P (insn)
4189 && (bb = BLOCK_FOR_INSN (insn)))
4191 if (BB_HEAD (bb) == insn)
4193 /* Never ever delete the basic block note without deleting the whole
4194 basic block. */
4195 gcc_assert (!NOTE_P (insn));
4196 BB_HEAD (bb) = next;
4198 if (BB_END (bb) == insn)
4199 BB_END (bb) = prev;
4203 /* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
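/* A minimal sketch of how a caller typically builds CALL_FUSAGE before
   handing it to this function (illustrative only; CALL_INSN and REG are
   hypothetical):

     rtx call_fusage = NULL_RTX;
     use_reg (&call_fusage, reg);
     add_function_usage_to (call_insn, call_fusage);  */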
4205 void
4206 add_function_usage_to (rtx call_insn, rtx call_fusage)
4208 gcc_assert (call_insn && CALL_P (call_insn));
4210 /* Put the register usage information on the CALL. If there is already
4211 some usage information, put ours at the end. */
4212 if (CALL_INSN_FUNCTION_USAGE (call_insn))
4214 rtx link;
4216 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
4217 link = XEXP (link, 1))
4220 XEXP (link, 1) = call_fusage;
4222 else
4223 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
4226 /* Delete all insns made since FROM.
4227 FROM becomes the new last instruction. */
4229 void
4230 delete_insns_since (rtx_insn *from)
4232 if (from == 0)
4233 set_first_insn (0);
4234 else
4235 SET_NEXT_INSN (from) = 0;
4236 set_last_insn (from);
4239 /* This function is deprecated; please use sequences instead.
4241 Move a consecutive bunch of insns to a different place in the chain.
4242 The insns to be moved are those between FROM and TO.
4243 They are moved to a new position after the insn AFTER.
4244 AFTER must not be FROM or TO or any insn in between.
4246 This function does not know about SEQUENCEs and hence should not be
4247 called after delay-slot filling has been done. */
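/* For instance (illustrative; I1, I3 and PLACE are hypothetical insns),

     reorder_insns_nobb (i1, i3, place);

   moves the consecutive insns I1..I3 so that they immediately follow
   PLACE, where PLACE must not be any insn in the range I1..I3.  */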
4249 void
4250 reorder_insns_nobb (rtx_insn *from, rtx_insn *to, rtx_insn *after)
4252 #ifdef ENABLE_CHECKING
4253 rtx_insn *x;
4254 for (x = from; x != to; x = NEXT_INSN (x))
4255 gcc_assert (after != x);
4256 gcc_assert (after != to);
4257 #endif
4259 /* Splice this bunch out of where it is now. */
4260 if (PREV_INSN (from))
4261 SET_NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
4262 if (NEXT_INSN (to))
4263 SET_PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
4264 if (get_last_insn () == to)
4265 set_last_insn (PREV_INSN (from));
4266 if (get_insns () == from)
4267 set_first_insn (NEXT_INSN (to));
4269 /* Make the new neighbors point to it and it to them. */
4270 if (NEXT_INSN (after))
4271 SET_PREV_INSN (NEXT_INSN (after)) = to;
4273 SET_NEXT_INSN (to) = NEXT_INSN (after);
4274 SET_PREV_INSN (from) = after;
4275 SET_NEXT_INSN (after) = from;
4276 if (after == get_last_insn ())
4277 set_last_insn (to);
4280 /* Same as function above, but take care to update BB boundaries. */
4281 void
4282 reorder_insns (rtx_insn *from, rtx_insn *to, rtx_insn *after)
4284 rtx_insn *prev = PREV_INSN (from);
4285 basic_block bb, bb2;
4287 reorder_insns_nobb (from, to, after);
4289 if (!BARRIER_P (after)
4290 && (bb = BLOCK_FOR_INSN (after)))
4292 rtx_insn *x;
4293 df_set_bb_dirty (bb);
4295 if (!BARRIER_P (from)
4296 && (bb2 = BLOCK_FOR_INSN (from)))
4298 if (BB_END (bb2) == to)
4299 BB_END (bb2) = prev;
4300 df_set_bb_dirty (bb2);
4303 if (BB_END (bb) == after)
4304 BB_END (bb) = to;
4306 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
4307 if (!BARRIER_P (x))
4308 df_insn_change_bb (x, bb);
4313 /* Emit insn(s) of given code and pattern
4314 at a specified place within the doubly-linked list.
4316 All of the emit_foo global entry points accept an object
4317 X which is either an insn list or a PATTERN of a single
4318 instruction.
4320 There are thus a few canonical ways to generate code and
4321 emit it at a specific place in the instruction stream. For
4322 example, consider the instruction named SPOT and the fact that
4323 we would like to emit some instructions before SPOT. We might
4324 do it like this:
4326 start_sequence ();
4327 ... emit the new instructions ...
4328 insns_head = get_insns ();
4329 end_sequence ();
4331 emit_insn_before (insns_head, SPOT);
4333 It used to be common to generate SEQUENCE rtl instead, but that
4334 is a relic of the past which no longer occurs. The reason is that
4335 SEQUENCE rtl results in heavily fragmented RTL memory, since the
4336 SEQUENCE generated would almost certainly die right after it was created. */
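/* An analogous sketch for inserting after SPOT (illustrative, not from
   the original sources):

     start_sequence ();
     ... emit the new instructions ...
     insns_head = get_insns ();
     end_sequence ();

     emit_insn_after (insns_head, SPOT);  */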
4338 static rtx_insn *
4339 emit_pattern_before_noloc (rtx x, rtx before, rtx last, basic_block bb,
4340 rtx_insn *(*make_raw) (rtx))
4342 rtx_insn *insn;
4344 gcc_assert (before);
4346 if (x == NULL_RTX)
4347 return safe_as_a <rtx_insn *> (last);
4349 switch (GET_CODE (x))
4351 case DEBUG_INSN:
4352 case INSN:
4353 case JUMP_INSN:
4354 case CALL_INSN:
4355 case CODE_LABEL:
4356 case BARRIER:
4357 case NOTE:
4358 insn = as_a <rtx_insn *> (x);
4359 while (insn)
4361 rtx_insn *next = NEXT_INSN (insn);
4362 add_insn_before (insn, before, bb);
4363 last = insn;
4364 insn = next;
4366 break;
4368 #ifdef ENABLE_RTL_CHECKING
4369 case SEQUENCE:
4370 gcc_unreachable ();
4371 break;
4372 #endif
4374 default:
4375 last = (*make_raw) (x);
4376 add_insn_before (last, before, bb);
4377 break;
4380 return safe_as_a <rtx_insn *> (last);
4383 /* Make X be output before the instruction BEFORE. */
4385 rtx_insn *
4386 emit_insn_before_noloc (rtx x, rtx_insn *before, basic_block bb)
4388 return emit_pattern_before_noloc (x, before, before, bb, make_insn_raw);
4391 /* Make an instruction with body X and code JUMP_INSN
4392 and output it before the instruction BEFORE. */
4394 rtx_insn *
4395 emit_jump_insn_before_noloc (rtx x, rtx_insn *before)
4397 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4398 make_jump_insn_raw);
4401 /* Make an instruction with body X and code CALL_INSN
4402 and output it before the instruction BEFORE. */
4404 rtx_insn *
4405 emit_call_insn_before_noloc (rtx x, rtx_insn *before)
4407 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4408 make_call_insn_raw);
4411 /* Make an instruction with body X and code DEBUG_INSN
4412 and output it before the instruction BEFORE. */
4414 rtx_insn *
4415 emit_debug_insn_before_noloc (rtx x, rtx before)
4417 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4418 make_debug_insn_raw);
4421 /* Make an insn of code BARRIER
4422 and output it before the insn BEFORE. */
4424 rtx_barrier *
4425 emit_barrier_before (rtx before)
4427 rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
4429 INSN_UID (insn) = cur_insn_uid++;
4431 add_insn_before (insn, before, NULL);
4432 return insn;
4435 /* Emit the label LABEL before the insn BEFORE. */
4437 rtx_insn *
4438 emit_label_before (rtx label, rtx_insn *before)
4440 gcc_checking_assert (INSN_UID (label) == 0);
4441 INSN_UID (label) = cur_insn_uid++;
4442 add_insn_before (label, before, NULL);
4443 return as_a <rtx_insn *> (label);
4446 /* Helper for emit_insn_after, handles lists of instructions
4447 efficiently. */
4449 static rtx_insn *
4450 emit_insn_after_1 (rtx_insn *first, rtx uncast_after, basic_block bb)
4452 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4453 rtx_insn *last;
4454 rtx_insn *after_after;
4455 if (!bb && !BARRIER_P (after))
4456 bb = BLOCK_FOR_INSN (after);
4458 if (bb)
4460 df_set_bb_dirty (bb);
4461 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4462 if (!BARRIER_P (last))
4464 set_block_for_insn (last, bb);
4465 df_insn_rescan (last);
4467 if (!BARRIER_P (last))
4469 set_block_for_insn (last, bb);
4470 df_insn_rescan (last);
4472 if (BB_END (bb) == after)
4473 BB_END (bb) = last;
4475 else
4476 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4477 continue;
4479 after_after = NEXT_INSN (after);
4481 SET_NEXT_INSN (after) = first;
4482 SET_PREV_INSN (first) = after;
4483 SET_NEXT_INSN (last) = after_after;
4484 if (after_after)
4485 SET_PREV_INSN (after_after) = last;
4487 if (after == get_last_insn ())
4488 set_last_insn (last);
4490 return last;
4493 static rtx_insn *
4494 emit_pattern_after_noloc (rtx x, rtx uncast_after, basic_block bb,
4495 rtx_insn *(*make_raw)(rtx))
4497 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4498 rtx_insn *last = after;
4500 gcc_assert (after);
4502 if (x == NULL_RTX)
4503 return last;
4505 switch (GET_CODE (x))
4507 case DEBUG_INSN:
4508 case INSN:
4509 case JUMP_INSN:
4510 case CALL_INSN:
4511 case CODE_LABEL:
4512 case BARRIER:
4513 case NOTE:
4514 last = emit_insn_after_1 (as_a <rtx_insn *> (x), after, bb);
4515 break;
4517 #ifdef ENABLE_RTL_CHECKING
4518 case SEQUENCE:
4519 gcc_unreachable ();
4520 break;
4521 #endif
4523 default:
4524 last = (*make_raw) (x);
4525 add_insn_after (last, after, bb);
4526 break;
4529 return last;
4532 /* Make X be output after the insn AFTER and set the BB of insn. If
4533 BB is NULL, an attempt is made to infer the BB from AFTER. */
4535 rtx_insn *
4536 emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
4538 return emit_pattern_after_noloc (x, after, bb, make_insn_raw);
4542 /* Make an insn of code JUMP_INSN with body X
4543 and output it after the insn AFTER. */
4545 rtx_insn *
4546 emit_jump_insn_after_noloc (rtx x, rtx after)
4548 return emit_pattern_after_noloc (x, after, NULL, make_jump_insn_raw);
4551 /* Make an instruction with body X and code CALL_INSN
4552 and output it after the instruction AFTER. */
4554 rtx_insn *
4555 emit_call_insn_after_noloc (rtx x, rtx after)
4557 return emit_pattern_after_noloc (x, after, NULL, make_call_insn_raw);
4560 /* Make an instruction with body X and code DEBUG_INSN
4561 and output it after the instruction AFTER. */
4563 rtx_insn *
4564 emit_debug_insn_after_noloc (rtx x, rtx after)
4566 return emit_pattern_after_noloc (x, after, NULL, make_debug_insn_raw);
4569 /* Make an insn of code BARRIER
4570 and output it after the insn AFTER. */
4572 rtx_barrier *
4573 emit_barrier_after (rtx after)
4575 rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
4577 INSN_UID (insn) = cur_insn_uid++;
4579 add_insn_after (insn, after, NULL);
4580 return insn;
4583 /* Emit the label LABEL after the insn AFTER. */
4585 rtx_insn *
4586 emit_label_after (rtx label, rtx_insn *after)
4588 gcc_checking_assert (INSN_UID (label) == 0);
4589 INSN_UID (label) = cur_insn_uid++;
4590 add_insn_after (label, after, NULL);
4591 return as_a <rtx_insn *> (label);
4594 /* Notes require a bit of special handling: Some notes need to have their
4595 BLOCK_FOR_INSN set, others should never have it set, and some should
4596 have it set or clear depending on the context. */
4598 /* Return true iff a note of kind SUBTYPE should be emitted with routines
4599 that never set BLOCK_FOR_INSN on NOTE. ON_BB_BOUNDARY_P is true if the
4600 caller is asked to emit a note before BB_HEAD, or after BB_END. */
4602 static bool
4603 note_outside_basic_block_p (enum insn_note subtype, bool on_bb_boundary_p)
4605 switch (subtype)
4607 /* NOTE_INSN_SWITCH_TEXT_SECTIONS only appears between basic blocks. */
4608 case NOTE_INSN_SWITCH_TEXT_SECTIONS:
4609 return true;
4611 /* Notes for var tracking and EH region markers can appear between or
4612 inside basic blocks. If the caller is emitting on the basic block
4613 boundary, do not set BLOCK_FOR_INSN on the new note. */
4614 case NOTE_INSN_VAR_LOCATION:
4615 case NOTE_INSN_CALL_ARG_LOCATION:
4616 case NOTE_INSN_EH_REGION_BEG:
4617 case NOTE_INSN_EH_REGION_END:
4618 return on_bb_boundary_p;
4620 /* Otherwise, BLOCK_FOR_INSN must be set. */
4621 default:
4622 return false;
4626 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
4628 rtx_note *
4629 emit_note_after (enum insn_note subtype, rtx uncast_after)
4631 rtx_insn *after = as_a <rtx_insn *> (uncast_after);
4632 rtx_note *note = make_note_raw (subtype);
4633 basic_block bb = BARRIER_P (after) ? NULL : BLOCK_FOR_INSN (after);
4634 bool on_bb_boundary_p = (bb != NULL && BB_END (bb) == after);
4636 if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4637 add_insn_after_nobb (note, after);
4638 else
4639 add_insn_after (note, after, bb);
4640 return note;
4643 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4645 rtx_note *
4646 emit_note_before (enum insn_note subtype, rtx uncast_before)
4648 rtx_insn *before = as_a <rtx_insn *> (uncast_before);
4649 rtx_note *note = make_note_raw (subtype);
4650 basic_block bb = BARRIER_P (before) ? NULL : BLOCK_FOR_INSN (before);
4651 bool on_bb_boundary_p = (bb != NULL && BB_HEAD (bb) == before);
4653 if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4654 add_insn_before_nobb (note, before);
4655 else
4656 add_insn_before (note, before, bb);
4657 return note;
4660 /* Insert PATTERN after AFTER, setting its INSN_LOCATION to LOC.
4661 MAKE_RAW indicates how to turn PATTERN into a real insn. */
4663 static rtx_insn *
4664 emit_pattern_after_setloc (rtx pattern, rtx uncast_after, int loc,
4665 rtx_insn *(*make_raw) (rtx))
4667 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4668 rtx last = emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4670 if (pattern == NULL_RTX || !loc)
4671 return safe_as_a <rtx_insn *> (last);
4673 after = NEXT_INSN (after);
4674 while (1)
4676 if (active_insn_p (after) && !INSN_LOCATION (after))
4677 INSN_LOCATION (after) = loc;
4678 if (after == last)
4679 break;
4680 after = NEXT_INSN (after);
4682 return safe_as_a <rtx_insn *> (last);
4685 /* Insert PATTERN after AFTER. MAKE_RAW indicates how to turn PATTERN
4686 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert after
4687 any DEBUG_INSNs. */
4689 static rtx_insn *
4690 emit_pattern_after (rtx pattern, rtx uncast_after, bool skip_debug_insns,
4691 rtx_insn *(*make_raw) (rtx))
4693 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4694 rtx_insn *prev = after;
4696 if (skip_debug_insns)
4697 while (DEBUG_INSN_P (prev))
4698 prev = PREV_INSN (prev);
4700 if (INSN_P (prev))
4701 return emit_pattern_after_setloc (pattern, after, INSN_LOCATION (prev),
4702 make_raw);
4703 else
4704 return emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4707 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4708 rtx_insn *
4709 emit_insn_after_setloc (rtx pattern, rtx after, int loc)
4711 return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw);
4714 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4715 rtx_insn *
4716 emit_insn_after (rtx pattern, rtx after)
4718 return emit_pattern_after (pattern, after, true, make_insn_raw);
4721 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4722 rtx_insn *
4723 emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
4725 return emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw);
4728 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4729 rtx_insn *
4730 emit_jump_insn_after (rtx pattern, rtx after)
4732 return emit_pattern_after (pattern, after, true, make_jump_insn_raw);
4735 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4736 rtx_insn *
4737 emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
4739 return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw);
4742 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4743 rtx_insn *
4744 emit_call_insn_after (rtx pattern, rtx after)
4746 return emit_pattern_after (pattern, after, true, make_call_insn_raw);
4749 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4750 rtx_insn *
4751 emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc)
4753 return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw);
4756 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4757 rtx_insn *
4758 emit_debug_insn_after (rtx pattern, rtx after)
4760 return emit_pattern_after (pattern, after, false, make_debug_insn_raw);
4763 /* Insert PATTERN before BEFORE, setting its INSN_LOCATION to LOC.
4764 MAKE_RAW indicates how to turn PATTERN into a real insn. INSNP
4765 indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN,
4766 CALL_INSN, etc. */
4768 static rtx_insn *
4769 emit_pattern_before_setloc (rtx pattern, rtx uncast_before, int loc, bool insnp,
4770 rtx_insn *(*make_raw) (rtx))
4772 rtx_insn *before = as_a <rtx_insn *> (uncast_before);
4773 rtx_insn *first = PREV_INSN (before);
4774 rtx_insn *last = emit_pattern_before_noloc (pattern, before,
4775 insnp ? before : NULL_RTX,
4776 NULL, make_raw);
4778 if (pattern == NULL_RTX || !loc)
4779 return last;
4781 if (!first)
4782 first = get_insns ();
4783 else
4784 first = NEXT_INSN (first);
4785 while (1)
4787 if (active_insn_p (first) && !INSN_LOCATION (first))
4788 INSN_LOCATION (first) = loc;
4789 if (first == last)
4790 break;
4791 first = NEXT_INSN (first);
4793 return last;
4796 /* Insert PATTERN before BEFORE. MAKE_RAW indicates how to turn PATTERN
4797 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert
4798 before any DEBUG_INSNs. INSNP indicates if PATTERN is meant for an
4799 INSN as opposed to a JUMP_INSN, CALL_INSN, etc. */
4801 static rtx_insn *
4802 emit_pattern_before (rtx pattern, rtx uncast_before, bool skip_debug_insns,
4803 bool insnp, rtx_insn *(*make_raw) (rtx))
4805 rtx_insn *before = safe_as_a <rtx_insn *> (uncast_before);
4806 rtx_insn *next = before;
4808 if (skip_debug_insns)
4809 while (DEBUG_INSN_P (next))
4810 next = PREV_INSN (next);
4812 if (INSN_P (next))
4813 return emit_pattern_before_setloc (pattern, before, INSN_LOCATION (next),
4814 insnp, make_raw);
4815 else
4816 return emit_pattern_before_noloc (pattern, before,
4817 insnp ? before : NULL_RTX,
4818 NULL, make_raw);
4821 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4822 rtx_insn *
4823 emit_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
4825 return emit_pattern_before_setloc (pattern, before, loc, true,
4826 make_insn_raw);
4829 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
4830 rtx_insn *
4831 emit_insn_before (rtx pattern, rtx before)
4833 return emit_pattern_before (pattern, before, true, true, make_insn_raw);
4836 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4837 rtx_insn *
4838 emit_jump_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
4840 return emit_pattern_before_setloc (pattern, before, loc, false,
4841 make_jump_insn_raw);
4844 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
4845 rtx_insn *
4846 emit_jump_insn_before (rtx pattern, rtx before)
4848 return emit_pattern_before (pattern, before, true, false,
4849 make_jump_insn_raw);
4852 /* Like emit_call_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4853 rtx_insn *
4854 emit_call_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
4856 return emit_pattern_before_setloc (pattern, before, loc, false,
4857 make_call_insn_raw);
4860 /* Like emit_call_insn_before_noloc,
4861 but set insn_location according to BEFORE. */
4862 rtx_insn *
4863 emit_call_insn_before (rtx pattern, rtx_insn *before)
4865 return emit_pattern_before (pattern, before, true, false,
4866 make_call_insn_raw);
4869 /* Like emit_debug_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4870 rtx_insn *
4871 emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc)
4873 return emit_pattern_before_setloc (pattern, before, loc, false,
4874 make_debug_insn_raw);
4877 /* Like emit_debug_insn_before_noloc,
4878 but set insn_location according to BEFORE. */
4879 rtx_insn *
4880 emit_debug_insn_before (rtx pattern, rtx before)
4882 return emit_pattern_before (pattern, before, false, false,
4883 make_debug_insn_raw);
4886 /* Take X and emit it at the end of the doubly-linked
4887 INSN list.
4889 Returns the last insn emitted. */
4891 rtx_insn *
4892 emit_insn (rtx x)
4894 rtx_insn *last = get_last_insn ();
4895 rtx_insn *insn;
4897 if (x == NULL_RTX)
4898 return last;
4900 switch (GET_CODE (x))
4902 case DEBUG_INSN:
4903 case INSN:
4904 case JUMP_INSN:
4905 case CALL_INSN:
4906 case CODE_LABEL:
4907 case BARRIER:
4908 case NOTE:
4909 insn = as_a <rtx_insn *> (x);
4910 while (insn)
4912 rtx_insn *next = NEXT_INSN (insn);
4913 add_insn (insn);
4914 last = insn;
4915 insn = next;
4917 break;
4919 #ifdef ENABLE_RTL_CHECKING
4920 case JUMP_TABLE_DATA:
4921 case SEQUENCE:
4922 gcc_unreachable ();
4923 break;
4924 #endif
4926 default:
4927 last = make_insn_raw (x);
4928 add_insn (last);
4929 break;
4932 return last;
4935 /* Make an insn of code DEBUG_INSN with pattern X
4936 and add it to the end of the doubly-linked list. */
4938 rtx_insn *
4939 emit_debug_insn (rtx x)
4941 rtx_insn *last = get_last_insn ();
4942 rtx_insn *insn;
4944 if (x == NULL_RTX)
4945 return last;
4947 switch (GET_CODE (x))
4949 case DEBUG_INSN:
4950 case INSN:
4951 case JUMP_INSN:
4952 case CALL_INSN:
4953 case CODE_LABEL:
4954 case BARRIER:
4955 case NOTE:
4956 insn = as_a <rtx_insn *> (x);
4957 while (insn)
4959 rtx_insn *next = NEXT_INSN (insn);
4960 add_insn (insn);
4961 last = insn;
4962 insn = next;
4964 break;
4966 #ifdef ENABLE_RTL_CHECKING
4967 case JUMP_TABLE_DATA:
4968 case SEQUENCE:
4969 gcc_unreachable ();
4970 break;
4971 #endif
4973 default:
4974 last = make_debug_insn_raw (x);
4975 add_insn (last);
4976 break;
4979 return last;
4982 /* Make an insn of code JUMP_INSN with pattern X
4983 and add it to the end of the doubly-linked list. */
4985 rtx_insn *
4986 emit_jump_insn (rtx x)
4988 rtx_insn *last = NULL;
4989 rtx_insn *insn;
4991 switch (GET_CODE (x))
4993 case DEBUG_INSN:
4994 case INSN:
4995 case JUMP_INSN:
4996 case CALL_INSN:
4997 case CODE_LABEL:
4998 case BARRIER:
4999 case NOTE:
5000 insn = as_a <rtx_insn *> (x);
5001 while (insn)
5003 rtx_insn *next = NEXT_INSN (insn);
5004 add_insn (insn);
5005 last = insn;
5006 insn = next;
5008 break;
5010 #ifdef ENABLE_RTL_CHECKING
5011 case JUMP_TABLE_DATA:
5012 case SEQUENCE:
5013 gcc_unreachable ();
5014 break;
5015 #endif
5017 default:
5018 last = make_jump_insn_raw (x);
5019 add_insn (last);
5020 break;
5023 return last;
5026 /* Make an insn of code CALL_INSN with pattern X
5027 and add it to the end of the doubly-linked list. */
5029 rtx_insn *
5030 emit_call_insn (rtx x)
5032 rtx_insn *insn;
5034 switch (GET_CODE (x))
5036 case DEBUG_INSN:
5037 case INSN:
5038 case JUMP_INSN:
5039 case CALL_INSN:
5040 case CODE_LABEL:
5041 case BARRIER:
5042 case NOTE:
5043 insn = emit_insn (x);
5044 break;
5046 #ifdef ENABLE_RTL_CHECKING
5047 case SEQUENCE:
5048 case JUMP_TABLE_DATA:
5049 gcc_unreachable ();
5050 break;
5051 #endif
5053 default:
5054 insn = make_call_insn_raw (x);
5055 add_insn (insn);
5056 break;
5059 return insn;
5062 /* Add the label LABEL to the end of the doubly-linked list. */
5064 rtx_insn *
5065 emit_label (rtx label)
5067 gcc_checking_assert (INSN_UID (label) == 0);
5068 INSN_UID (label) = cur_insn_uid++;
5069 add_insn (as_a <rtx_insn *> (label));
5070 return as_a <rtx_insn *> (label);
5073 /* Make an insn of code JUMP_TABLE_DATA
5074 and add it to the end of the doubly-linked list. */
5076 rtx_jump_table_data *
5077 emit_jump_table_data (rtx table)
5079 rtx_jump_table_data *jump_table_data =
5080 as_a <rtx_jump_table_data *> (rtx_alloc (JUMP_TABLE_DATA));
5081 INSN_UID (jump_table_data) = cur_insn_uid++;
5082 PATTERN (jump_table_data) = table;
5083 BLOCK_FOR_INSN (jump_table_data) = NULL;
5084 add_insn (jump_table_data);
5085 return jump_table_data;
5088 /* Make an insn of code BARRIER
5089 and add it to the end of the doubly-linked list. */
5091 rtx_barrier *
5092 emit_barrier (void)
5094 rtx_barrier *barrier = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
5095 INSN_UID (barrier) = cur_insn_uid++;
5096 add_insn (barrier);
5097 return barrier;
5100 /* Emit a copy of note ORIG. */
5102 rtx_note *
5103 emit_note_copy (rtx_note *orig)
5105 enum insn_note kind = (enum insn_note) NOTE_KIND (orig);
5106 rtx_note *note = make_note_raw (kind);
5107 NOTE_DATA (note) = NOTE_DATA (orig);
5108 add_insn (note);
5109 return note;
5112 /* Make an insn of code NOTE with kind KIND
5113 and add it to the end of the doubly-linked list. */
5115 rtx_note *
5116 emit_note (enum insn_note kind)
5118 rtx_note *note = make_note_raw (kind);
5119 add_insn (note);
5120 return note;
5123 /* Emit a clobber of lvalue X. */
5125 rtx_insn *
5126 emit_clobber (rtx x)
5128 /* CONCATs should not appear in the insn stream. */
5129 if (GET_CODE (x) == CONCAT)
5131 emit_clobber (XEXP (x, 0));
5132 return emit_clobber (XEXP (x, 1));
5134 return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
5137 /* Return a sequence of insns to clobber lvalue X. */
5139 rtx_insn *
5140 gen_clobber (rtx x)
5142 rtx_insn *seq;
5144 start_sequence ();
5145 emit_clobber (x);
5146 seq = get_insns ();
5147 end_sequence ();
5148 return seq;
5151 /* Emit a use of rvalue X. */
5153 rtx_insn *
5154 emit_use (rtx x)
5156 /* CONCATs should not appear in the insn stream. */
5157 if (GET_CODE (x) == CONCAT)
5159 emit_use (XEXP (x, 0));
5160 return emit_use (XEXP (x, 1));
5162 return emit_insn (gen_rtx_USE (VOIDmode, x));
5165 /* Return a sequence of insns to use rvalue X. */
5167 rtx_insn *
5168 gen_use (rtx x)
5170 rtx_insn *seq;
5172 start_sequence ();
5173 emit_use (x);
5174 seq = get_insns ();
5175 end_sequence ();
5176 return seq;
5179 /* Notes like REG_EQUAL and REG_EQUIV refer to a set in an instruction.
5180 Return the set in INSN that such notes describe, or NULL if the notes
5181 have no meaning for INSN. */
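/* Illustrative examples (not from the original sources):

     (set (reg:SI 100) (plus:SI ...))            -> that SET
     (parallel [(set (reg:SI 100) ...)
                (clobber (reg:CC ...))])         -> the first SET
     (parallel [(set (reg:SI 100) ...)
                (set (reg:SI 101) ...)])         -> NULL_RTX (two SETs)
     (set (mem:SI ...) ...)                      -> NULL_RTX (no register)  */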
5183 rtx
5184 set_for_reg_notes (rtx insn)
5186 rtx pat, reg;
5188 if (!INSN_P (insn))
5189 return NULL_RTX;
5191 pat = PATTERN (insn);
5192 if (GET_CODE (pat) == PARALLEL)
5194 /* We do not use single_set because that ignores SETs of unused
5195 registers. REG_EQUAL and REG_EQUIV notes really do require the
5196 PARALLEL to have a single SET. */
5197 if (multiple_sets (insn))
5198 return NULL_RTX;
5199 pat = XVECEXP (pat, 0, 0);
5202 if (GET_CODE (pat) != SET)
5203 return NULL_RTX;
5205 reg = SET_DEST (pat);
5207 /* Notes apply to the contents of a STRICT_LOW_PART. */
5208 if (GET_CODE (reg) == STRICT_LOW_PART)
5209 reg = XEXP (reg, 0);
5211 /* Check that we have a register. */
5212 if (!(REG_P (reg) || GET_CODE (reg) == SUBREG))
5213 return NULL_RTX;
5215 return pat;
5218 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
5219 note of this type already exists, remove it first. */
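/* A minimal usage sketch (illustrative; INSN is assumed to be a
   single-set insn whose result is known to be 42):

     set_unique_reg_note (insn, REG_EQUAL, GEN_INT (42));

   INSN ends up with at most one REG_EQUAL note afterwards.  */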
5221 rtx
5222 set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
5224 rtx note = find_reg_note (insn, kind, NULL_RTX);
5226 switch (kind)
5228 case REG_EQUAL:
5229 case REG_EQUIV:
5230 if (!set_for_reg_notes (insn))
5231 return NULL_RTX;
5233 /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
5234 It serves no useful purpose and breaks eliminate_regs. */
5235 if (GET_CODE (datum) == ASM_OPERANDS)
5236 return NULL_RTX;
5238 /* Notes with side effects are dangerous. Even if the side-effect
5239 initially mirrors one in PATTERN (INSN), later optimizations
5240 might alter the way that the final register value is calculated
5241 and so move or alter the side-effect in some way. The note would
5242 then no longer be a valid substitution for SET_SRC. */
5243 if (side_effects_p (datum))
5244 return NULL_RTX;
5245 break;
5247 default:
5248 break;
5251 if (note)
5252 XEXP (note, 0) = datum;
5253 else
5255 add_reg_note (insn, kind, datum);
5256 note = REG_NOTES (insn);
5259 switch (kind)
5261 case REG_EQUAL:
5262 case REG_EQUIV:
5263 df_notes_rescan (as_a <rtx_insn *> (insn));
5264 break;
5265 default:
5266 break;
5269 return note;
5272 /* Like set_unique_reg_note, but don't do anything unless INSN sets DST. */
5273 rtx
5274 set_dst_reg_note (rtx insn, enum reg_note kind, rtx datum, rtx dst)
5276 rtx set = set_for_reg_notes (insn);
5278 if (set && SET_DEST (set) == dst)
5279 return set_unique_reg_note (insn, kind, datum);
5280 return NULL_RTX;
5283 /* Return an indication of which type of insn should have X as a body.
5284 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
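/* For example (illustrative): a (set (pc) ...) body yields JUMP_INSN, a
   body containing a CALL (directly, as a SET_SRC, or inside a PARALLEL)
   yields CALL_INSN, a bare label yields CODE_LABEL, and an ordinary
   (set (reg) ...) yields INSN.  */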
5286 static enum rtx_code
5287 classify_insn (rtx x)
5289 if (LABEL_P (x))
5290 return CODE_LABEL;
5291 if (GET_CODE (x) == CALL)
5292 return CALL_INSN;
5293 if (ANY_RETURN_P (x))
5294 return JUMP_INSN;
5295 if (GET_CODE (x) == SET)
5297 if (SET_DEST (x) == pc_rtx)
5298 return JUMP_INSN;
5299 else if (GET_CODE (SET_SRC (x)) == CALL)
5300 return CALL_INSN;
5301 else
5302 return INSN;
5304 if (GET_CODE (x) == PARALLEL)
5306 int j;
5307 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
5308 if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
5309 return CALL_INSN;
5310 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
5311 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
5312 return JUMP_INSN;
5313 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
5314 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
5315 return CALL_INSN;
5317 return INSN;
5320 /* Emit the rtl pattern X as an appropriate kind of insn.
5321 If X is a label, it is simply added into the insn chain. */
5323 rtx_insn *
5324 emit (rtx x)
5326 enum rtx_code code = classify_insn (x);
5328 switch (code)
5330 case CODE_LABEL:
5331 return emit_label (x);
5332 case INSN:
5333 return emit_insn (x);
5334 case JUMP_INSN:
5336 rtx_insn *insn = emit_jump_insn (x);
5337 if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
5338 return emit_barrier ();
5339 return insn;
5341 case CALL_INSN:
5342 return emit_call_insn (x);
5343 case DEBUG_INSN:
5344 return emit_debug_insn (x);
5345 default:
5346 gcc_unreachable ();
5350 /* Space for free sequence stack entries. */
5351 static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
5353 /* Begin emitting insns to a sequence. If this sequence will contain
5354 something that might cause the compiler to pop arguments to function
5355 calls (because those pops have previously been deferred; see
5356 INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
5357 before calling this function. That will ensure that the deferred
5358 pops are not accidentally emitted in the middle of this sequence. */
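/* A typical usage sketch (illustrative only; DEST, SRC and INSN are
   hypothetical operands and a hypothetical insertion point):

     do_pending_stack_adjust ();
     start_sequence ();
     emit_move_insn (dest, src);
     seq = get_insns ();
     end_sequence ();
     emit_insn_before (seq, insn);  */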
5360 void
5361 start_sequence (void)
5363 struct sequence_stack *tem;
5365 if (free_sequence_stack != NULL)
5367 tem = free_sequence_stack;
5368 free_sequence_stack = tem->next;
5370 else
5371 tem = ggc_alloc<sequence_stack> ();
5373 tem->next = seq_stack;
5374 tem->first = get_insns ();
5375 tem->last = get_last_insn ();
5377 seq_stack = tem;
5379 set_first_insn (0);
5380 set_last_insn (0);
5383 /* Set up the insn chain starting with FIRST as the current sequence,
5384 saving the previously current one. See the documentation for
5385 start_sequence for more information about how to use this function. */
5387 void
5388 push_to_sequence (rtx_insn *first)
5390 rtx_insn *last;
5392 start_sequence ();
5394 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last))
5397 set_first_insn (first);
5398 set_last_insn (last);
5401 /* Like push_to_sequence, but take the last insn as an argument to avoid
5402 looping through the list. */
5404 void
5405 push_to_sequence2 (rtx_insn *first, rtx_insn *last)
5407 start_sequence ();
5409 set_first_insn (first);
5410 set_last_insn (last);
5413 /* Set up the outer-level insn chain
5414 as the current sequence, saving the previously current one. */
5416 void
5417 push_topmost_sequence (void)
5419 struct sequence_stack *stack, *top = NULL;
5421 start_sequence ();
5423 for (stack = seq_stack; stack; stack = stack->next)
5424 top = stack;
5426 set_first_insn (top->first);
5427 set_last_insn (top->last);
5430 /* After emitting to the outer-level insn chain, update the outer-level
5431 insn chain, and restore the previous saved state. */
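/* An illustrative sketch of the usual pairing with push_topmost_sequence
   (PATTERN is a hypothetical insn pattern):

     push_topmost_sequence ();
     emit_insn (pattern);
     pop_topmost_sequence ();

   This emits PATTERN at the end of the outer-level chain even while a
   nested sequence is being constructed.  */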
5433 void
5434 pop_topmost_sequence (void)
5436 struct sequence_stack *stack, *top = NULL;
5438 for (stack = seq_stack; stack; stack = stack->next)
5439 top = stack;
5441 top->first = get_insns ();
5442 top->last = get_last_insn ();
5444 end_sequence ();
5447 /* After emitting to a sequence, restore previous saved state.
5449 To get the contents of the sequence just made, you must call
5450 `get_insns' *before* calling here.
5452 If the compiler might have deferred popping arguments while
5453 generating this sequence, and this sequence will not be immediately
5454 inserted into the instruction stream, use do_pending_stack_adjust
5455 before calling get_insns. That will ensure that the deferred
5456 pops are inserted into this sequence, and not into some random
5457 location in the instruction stream. See INHIBIT_DEFER_POP for more
5458 information about deferred popping of arguments. */
5460 void
5461 end_sequence (void)
5463 struct sequence_stack *tem = seq_stack;
5465 set_first_insn (tem->first);
5466 set_last_insn (tem->last);
5467 seq_stack = tem->next;
5469 memset (tem, 0, sizeof (*tem));
5470 tem->next = free_sequence_stack;
5471 free_sequence_stack = tem;
5474 /* Return 1 if currently emitting into a sequence. */
5476 int
5477 in_sequence_p (void)
5479 return seq_stack != 0;
5482 /* Put the various virtual registers into REGNO_REG_RTX. */
5484 static void
5485 init_virtual_regs (void)
5487 regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
5488 regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
5489 regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
5490 regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
5491 regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
5492 regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
5493 = virtual_preferred_stack_boundary_rtx;
5497 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
5498 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
5499 static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
5500 static int copy_insn_n_scratches;
5502 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5503 copied an ASM_OPERANDS.
5504 In that case, it is the original input-operand vector. */
5505 static rtvec orig_asm_operands_vector;
5507 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5508 copied an ASM_OPERANDS.
5509 In that case, it is the copied input-operand vector. */
5510 static rtvec copy_asm_operands_vector;
5512 /* Likewise for the constraints vector. */
5513 static rtvec orig_asm_constraints_vector;
5514 static rtvec copy_asm_constraints_vector;
5516 /* Recursively create a new copy of an rtx for copy_insn.
5517 This function differs from copy_rtx in that it handles SCRATCHes and
5518 ASM_OPERANDs properly.
5519 Normally, this function is not used directly; use copy_insn as front end.
5520 However, you could first copy an insn pattern with copy_insn and then use
5521 this function afterwards to properly copy any REG_NOTEs containing
5522 SCRATCHes. */
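/* For instance, a REG_NOTE attached to a copied insn can itself be copied
   with this function, much as emit_copy_of_insn_after does later in this
   file (illustrative sketch; LINK is a hypothetical note):

     add_reg_note (new_rtx, REG_NOTE_KIND (link), copy_insn_1 (XEXP (link, 0)));  */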
5524 rtx
5525 copy_insn_1 (rtx orig)
5527 rtx copy;
5528 int i, j;
5529 RTX_CODE code;
5530 const char *format_ptr;
5532 if (orig == NULL)
5533 return NULL;
5535 code = GET_CODE (orig);
5537 switch (code)
5539 case REG:
5540 case DEBUG_EXPR:
5541 CASE_CONST_ANY:
5542 case SYMBOL_REF:
5543 case CODE_LABEL:
5544 case PC:
5545 case CC0:
5546 case RETURN:
5547 case SIMPLE_RETURN:
5548 return orig;
5549 case CLOBBER:
5550 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
5551 clobbers or clobbers of hard registers that originated as pseudos.
5552 This is needed to allow safe register renaming. */
5553 if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER
5554 && ORIGINAL_REGNO (XEXP (orig, 0)) == REGNO (XEXP (orig, 0)))
5555 return orig;
5556 break;
5558 case SCRATCH:
5559 for (i = 0; i < copy_insn_n_scratches; i++)
5560 if (copy_insn_scratch_in[i] == orig)
5561 return copy_insn_scratch_out[i];
5562 break;
5564 case CONST:
5565 if (shared_const_p (orig))
5566 return orig;
5567 break;
5569 /* A MEM with a constant address is not sharable. The problem is that
5570 the constant address may need to be reloaded. If the mem is shared,
5571 then reloading one copy of this mem will cause all copies to appear
5572 to have been reloaded. */
5574 default:
5575 break;
5578 /* Copy the various flags, fields, and other information. We assume
5579 that all fields need copying, and then clear the fields that should
5580 not be copied. That is the sensible default behavior, and forces
5581 us to explicitly document why we are *not* copying a flag. */
5582 copy = shallow_copy_rtx (orig);
5584 /* We do not copy the USED flag, which is used as a mark bit during
5585 walks over the RTL. */
5586 RTX_FLAG (copy, used) = 0;
5588 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
5589 if (INSN_P (orig))
5591 RTX_FLAG (copy, jump) = 0;
5592 RTX_FLAG (copy, call) = 0;
5593 RTX_FLAG (copy, frame_related) = 0;
5596 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
5598 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
5599 switch (*format_ptr++)
5601 case 'e':
5602 if (XEXP (orig, i) != NULL)
5603 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
5604 break;
5606 case 'E':
5607 case 'V':
5608 if (XVEC (orig, i) == orig_asm_constraints_vector)
5609 XVEC (copy, i) = copy_asm_constraints_vector;
5610 else if (XVEC (orig, i) == orig_asm_operands_vector)
5611 XVEC (copy, i) = copy_asm_operands_vector;
5612 else if (XVEC (orig, i) != NULL)
5614 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
5615 for (j = 0; j < XVECLEN (copy, i); j++)
5616 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
5618 break;
5620 case 't':
5621 case 'w':
5622 case 'i':
5623 case 's':
5624 case 'S':
5625 case 'u':
5626 case '0':
5627 /* These are left unchanged. */
5628 break;
5630 default:
5631 gcc_unreachable ();
5634 if (code == SCRATCH)
5636 i = copy_insn_n_scratches++;
5637 gcc_assert (i < MAX_RECOG_OPERANDS);
5638 copy_insn_scratch_in[i] = orig;
5639 copy_insn_scratch_out[i] = copy;
5641 else if (code == ASM_OPERANDS)
5643 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
5644 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
5645 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
5646 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
5649 return copy;
5652 /* Create a new copy of an rtx.
5653 This function differs from copy_rtx in that it handles SCRATCHes and
5654 ASM_OPERANDs properly.
5655 INSN doesn't really have to be a full INSN; it could be just the
5656 pattern. */
5657 rtx
5658 copy_insn (rtx insn)
5660 copy_insn_n_scratches = 0;
5661 orig_asm_operands_vector = 0;
5662 orig_asm_constraints_vector = 0;
5663 copy_asm_operands_vector = 0;
5664 copy_asm_constraints_vector = 0;
5665 return copy_insn_1 (insn);
5668 /* Return a copy of INSN that can be used in a SEQUENCE delay slot,
5669 on the assumption that INSN itself remains in its original place. */
5671 rtx_insn *
5672 copy_delay_slot_insn (rtx_insn *insn)
5674 /* Copy INSN with its rtx_code, all its notes, location etc. */
5675 insn = as_a <rtx_insn *> (copy_rtx (insn));
5676 INSN_UID (insn) = cur_insn_uid++;
5677 return insn;
5680 /* Initialize data structures and variables in this file
5681 before generating rtl for each function. */
5683 void
5684 init_emit (void)
5686 set_first_insn (NULL);
5687 set_last_insn (NULL);
5688 if (MIN_NONDEBUG_INSN_UID)
5689 cur_insn_uid = MIN_NONDEBUG_INSN_UID;
5690 else
5691 cur_insn_uid = 1;
5692 cur_debug_insn_uid = 1;
5693 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
5694 first_label_num = label_num;
5695 seq_stack = NULL;
5697 /* Init the tables that describe all the pseudo regs. */
5699 crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
5701 crtl->emit.regno_pointer_align
5702 = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);
5704 regno_reg_rtx = ggc_vec_alloc<rtx> (crtl->emit.regno_pointer_align_length);
5706 /* Put copies of all the hard registers into regno_reg_rtx. */
5707 memcpy (regno_reg_rtx,
5708 initial_regno_reg_rtx,
5709 FIRST_PSEUDO_REGISTER * sizeof (rtx));
5711 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
5712 init_virtual_regs ();
5714 /* Indicate that the virtual registers and stack locations are
5715 all pointers. */
5716 REG_POINTER (stack_pointer_rtx) = 1;
5717 REG_POINTER (frame_pointer_rtx) = 1;
5718 REG_POINTER (hard_frame_pointer_rtx) = 1;
5719 REG_POINTER (arg_pointer_rtx) = 1;
5721 REG_POINTER (virtual_incoming_args_rtx) = 1;
5722 REG_POINTER (virtual_stack_vars_rtx) = 1;
5723 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5724 REG_POINTER (virtual_outgoing_args_rtx) = 1;
5725 REG_POINTER (virtual_cfa_rtx) = 1;
5727 #ifdef STACK_BOUNDARY
5728 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5729 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5730 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5731 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5733 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5734 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5735 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5736 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5737 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
5738 #endif
5740 #ifdef INIT_EXPANDERS
5741 INIT_EXPANDERS;
5742 #endif
5745 /* Generate a vector constant for mode MODE and constant value CONSTANT. */
5747 static rtx
5748 gen_const_vector (machine_mode mode, int constant)
5750 rtx tem;
5751 rtvec v;
5752 int units, i;
5753 machine_mode inner;
5755 units = GET_MODE_NUNITS (mode);
5756 inner = GET_MODE_INNER (mode);
5758 gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));
5760 v = rtvec_alloc (units);
5762 /* We need to call this function after we set the scalar const_tiny_rtx
5763 entries. */
5764 gcc_assert (const_tiny_rtx[constant][(int) inner]);
5766 for (i = 0; i < units; ++i)
5767 RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];
5769 tem = gen_rtx_raw_CONST_VECTOR (mode, v);
5770 return tem;
5773 /* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the zero vector when
5774 all elements are zero, and the one vector when all elements are one. */
5775 rtx
5776 gen_rtx_CONST_VECTOR (machine_mode mode, rtvec v)
5778 machine_mode inner = GET_MODE_INNER (mode);
5779 int nunits = GET_MODE_NUNITS (mode);
5780 rtx x;
5781 int i;
5783 /* Check to see if all of the elements have the same value. */
5784 x = RTVEC_ELT (v, nunits - 1);
5785 for (i = nunits - 2; i >= 0; i--)
5786 if (RTVEC_ELT (v, i) != x)
5787 break;
5789 /* If the values are all the same, check to see if we can use one of the
5790 standard constant vectors. */
5791 if (i == -1)
5793 if (x == CONST0_RTX (inner))
5794 return CONST0_RTX (mode);
5795 else if (x == CONST1_RTX (inner))
5796 return CONST1_RTX (mode);
5797 else if (x == CONSTM1_RTX (inner))
5798 return CONSTM1_RTX (mode);
5801 return gen_rtx_raw_CONST_VECTOR (mode, v);
5804 /* Initialize global register information required by all functions. */
5806 void
5807 init_emit_regs (void)
5809 int i;
5810 machine_mode mode;
5811 mem_attrs *attrs;
5813 /* Reset register attributes */
5814 htab_empty (reg_attrs_htab);
5816 /* We need reg_raw_mode, so initialize the modes now. */
5817 init_reg_modes_target ();
5819 /* Assign register numbers to the globally defined register rtx. */
5820 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
5821 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
5822 hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
5823 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
5824 virtual_incoming_args_rtx =
5825 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
5826 virtual_stack_vars_rtx =
5827 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
5828 virtual_stack_dynamic_rtx =
5829 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
5830 virtual_outgoing_args_rtx =
5831 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
5832 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
5833 virtual_preferred_stack_boundary_rtx =
5834 gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);
5836 /* Initialize RTL for commonly used hard registers. These are
5837 copied into regno_reg_rtx as we begin to compile each function. */
5838 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5839 initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
5841 #ifdef RETURN_ADDRESS_POINTER_REGNUM
5842 return_address_pointer_rtx
5843 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
5844 #endif
5846 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
5847 pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
5848 else
5849 pic_offset_table_rtx = NULL_RTX;
5851 for (i = 0; i < (int) MAX_MACHINE_MODE; i++)
5853 mode = (machine_mode) i;
5854 attrs = ggc_cleared_alloc<mem_attrs> ();
5855 attrs->align = BITS_PER_UNIT;
5856 attrs->addrspace = ADDR_SPACE_GENERIC;
5857 if (mode != BLKmode)
5859 attrs->size_known_p = true;
5860 attrs->size = GET_MODE_SIZE (mode);
5861 if (STRICT_ALIGNMENT)
5862 attrs->align = GET_MODE_ALIGNMENT (mode);
5864 mode_mem_attrs[i] = attrs;
5868 /* Initialize global machine_mode variables. */
5870 void
5871 init_derived_machine_modes (void)
5873 byte_mode = VOIDmode;
5874 word_mode = VOIDmode;
5876 for (machine_mode mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5877 mode != VOIDmode;
5878 mode = GET_MODE_WIDER_MODE (mode))
5880 if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
5881 && byte_mode == VOIDmode)
5882 byte_mode = mode;
5884 if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
5885 && word_mode == VOIDmode)
5886 word_mode = mode;
5889 ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
5892 /* Create some permanent unique rtl objects shared between all functions. */
5894 void
5895 init_emit_once (void)
5897 int i;
5898 machine_mode mode;
5899 machine_mode double_mode;
5901 /* Initialize the CONST_INT, CONST_WIDE_INT, CONST_DOUBLE,
5902 CONST_FIXED, and memory attribute hash tables. */
5903 const_int_htab = htab_create_ggc (37, const_int_htab_hash,
5904 const_int_htab_eq, NULL);
5906 #if TARGET_SUPPORTS_WIDE_INT
5907 const_wide_int_htab = htab_create_ggc (37, const_wide_int_htab_hash,
5908 const_wide_int_htab_eq, NULL);
5909 #endif
5910 const_double_htab = htab_create_ggc (37, const_double_htab_hash,
5911 const_double_htab_eq, NULL);
5913 const_fixed_htab = htab_create_ggc (37, const_fixed_htab_hash,
5914 const_fixed_htab_eq, NULL);
5916 reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash,
5917 reg_attrs_htab_eq, NULL);
5919 #ifdef INIT_EXPANDERS
5920 /* This is to initialize {init|mark|free}_machine_status before the first
5921 call to push_function_context_to. This is needed by the Chill front
5922 end which calls push_function_context_to before the first call to
5923 init_function_start. */
5924 INIT_EXPANDERS;
5925 #endif
5927 /* Create the unique rtx's for certain rtx codes and operand values. */
5929 /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
5930 tries to use these variables. */
5931 for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
5932 const_int_rtx[i + MAX_SAVED_CONST_INT] =
5933 gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);
5935 if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
5936 && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
5937 const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
5938 else
5939 const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
5941 double_mode = mode_for_size (DOUBLE_TYPE_SIZE, MODE_FLOAT, 0);
5943 real_from_integer (&dconst0, double_mode, 0, SIGNED);
5944 real_from_integer (&dconst1, double_mode, 1, SIGNED);
5945 real_from_integer (&dconst2, double_mode, 2, SIGNED);
5947 dconstm1 = dconst1;
5948 dconstm1.sign = 1;
5950 dconsthalf = dconst1;
5951 SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);
5953 for (i = 0; i < 3; i++)
5955 const REAL_VALUE_TYPE *const r =
5956 (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
5958 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
5959 mode != VOIDmode;
5960 mode = GET_MODE_WIDER_MODE (mode))
5961 const_tiny_rtx[i][(int) mode] =
5962 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5964 for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT);
5965 mode != VOIDmode;
5966 mode = GET_MODE_WIDER_MODE (mode))
5967 const_tiny_rtx[i][(int) mode] =
5968 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5970 const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
5972 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5973 mode != VOIDmode;
5974 mode = GET_MODE_WIDER_MODE (mode))
5975 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5977 for (mode = MIN_MODE_PARTIAL_INT;
5978 mode <= MAX_MODE_PARTIAL_INT;
5979 mode = (machine_mode)((int)(mode) + 1))
5980 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5983 const_tiny_rtx[3][(int) VOIDmode] = constm1_rtx;
5985 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5986 mode != VOIDmode;
5987 mode = GET_MODE_WIDER_MODE (mode))
5988 const_tiny_rtx[3][(int) mode] = constm1_rtx;
5990 for (mode = MIN_MODE_PARTIAL_INT;
5991 mode <= MAX_MODE_PARTIAL_INT;
5992 mode = (machine_mode)((int)(mode) + 1))
5993 const_tiny_rtx[3][(int) mode] = constm1_rtx;
5995 for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_INT);
5996 mode != VOIDmode;
5997 mode = GET_MODE_WIDER_MODE (mode))
5999 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
6000 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
6003 for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_FLOAT);
6004 mode != VOIDmode;
6005 mode = GET_MODE_WIDER_MODE (mode))
6007 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
6008 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
6011 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
6012 mode != VOIDmode;
6013 mode = GET_MODE_WIDER_MODE (mode))
6015 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6016 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6017 const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
6020 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
6021 mode != VOIDmode;
6022 mode = GET_MODE_WIDER_MODE (mode))
6024 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6025 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6028 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FRACT);
6029 mode != VOIDmode;
6030 mode = GET_MODE_WIDER_MODE (mode))
6032 FCONST0 (mode).data.high = 0;
6033 FCONST0 (mode).data.low = 0;
6034 FCONST0 (mode).mode = mode;
6035 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6036 FCONST0 (mode), mode);
6039 for (mode = GET_CLASS_NARROWEST_MODE (MODE_UFRACT);
6040 mode != VOIDmode;
6041 mode = GET_MODE_WIDER_MODE (mode))
6043 FCONST0 (mode).data.high = 0;
6044 FCONST0 (mode).data.low = 0;
6045 FCONST0 (mode).mode = mode;
6046 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6047 FCONST0 (mode), mode);
6050 for (mode = GET_CLASS_NARROWEST_MODE (MODE_ACCUM);
6051 mode != VOIDmode;
6052 mode = GET_MODE_WIDER_MODE (mode))
6054 FCONST0 (mode).data.high = 0;
6055 FCONST0 (mode).data.low = 0;
6056 FCONST0 (mode).mode = mode;
6057 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6058 FCONST0 (mode), mode);
6060 /* We store the value 1. */
6061 FCONST1 (mode).data.high = 0;
6062 FCONST1 (mode).data.low = 0;
6063 FCONST1 (mode).mode = mode;
6064 FCONST1 (mode).data
6065 = double_int_one.lshift (GET_MODE_FBIT (mode),
6066 HOST_BITS_PER_DOUBLE_INT,
6067 SIGNED_FIXED_POINT_MODE_P (mode));
6068 const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6069 FCONST1 (mode), mode);
6072 for (mode = GET_CLASS_NARROWEST_MODE (MODE_UACCUM);
6073 mode != VOIDmode;
6074 mode = GET_MODE_WIDER_MODE (mode))
6076 FCONST0 (mode).data.high = 0;
6077 FCONST0 (mode).data.low = 0;
6078 FCONST0 (mode).mode = mode;
6079 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6080 FCONST0 (mode), mode);
6082 /* We store the value 1. */
6083 FCONST1 (mode).data.high = 0;
6084 FCONST1 (mode).data.low = 0;
6085 FCONST1 (mode).mode = mode;
6086 FCONST1 (mode).data
6087 = double_int_one.lshift (GET_MODE_FBIT (mode),
6088 HOST_BITS_PER_DOUBLE_INT,
6089 SIGNED_FIXED_POINT_MODE_P (mode));
6090 const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6091 FCONST1 (mode), mode);
6094 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FRACT);
6095 mode != VOIDmode;
6096 mode = GET_MODE_WIDER_MODE (mode))
6098 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6101 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UFRACT);
6102 mode != VOIDmode;
6103 mode = GET_MODE_WIDER_MODE (mode))
6105 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6108 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_ACCUM);
6109 mode != VOIDmode;
6110 mode = GET_MODE_WIDER_MODE (mode))
6112 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6113 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6116 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UACCUM);
6117 mode != VOIDmode;
6118 mode = GET_MODE_WIDER_MODE (mode))
6120 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6121 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6124 for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
6125 if (GET_MODE_CLASS ((machine_mode) i) == MODE_CC)
6126 const_tiny_rtx[0][i] = const0_rtx;
6128 const_tiny_rtx[0][(int) BImode] = const0_rtx;
6129 if (STORE_FLAG_VALUE == 1)
6130 const_tiny_rtx[1][(int) BImode] = const1_rtx;
6132 pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
6133 ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
6134 simple_return_rtx = gen_rtx_fmt_ (SIMPLE_RETURN, VOIDmode);
6135 cc0_rtx = gen_rtx_fmt_ (CC0, VOIDmode);
6138 /* Produce an exact duplicate of insn INSN after AFTER.
6139 Take care to update libcall regions if present. */
6141 rtx_insn *
6142 emit_copy_of_insn_after (rtx_insn *insn, rtx_insn *after)
6144 rtx_insn *new_rtx;
6145 rtx link;
6147 switch (GET_CODE (insn))
6149 case INSN:
6150 new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
6151 break;
6153 case JUMP_INSN:
6154 new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
6155 CROSSING_JUMP_P (new_rtx) = CROSSING_JUMP_P (insn);
6156 break;
6158 case DEBUG_INSN:
6159 new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
6160 break;
6162 case CALL_INSN:
6163 new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
6164 if (CALL_INSN_FUNCTION_USAGE (insn))
6165 CALL_INSN_FUNCTION_USAGE (new_rtx)
6166 = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
6167 SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
6168 RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
6169 RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
6170 RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
6171 = RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
6172 break;
6174 default:
6175 gcc_unreachable ();
6178 /* Update LABEL_NUSES. */
6179 mark_jump_label (PATTERN (new_rtx), new_rtx, 0);
6181 INSN_LOCATION (new_rtx) = INSN_LOCATION (insn);
6183 /* If the old insn is frame related, then so is the new one. This is
6184 primarily needed for IA-64 unwind info which marks epilogue insns,
6185 which may be duplicated by the basic block reordering code. */
6186 RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);
6188 /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
6189 will make them. REG_LABEL_TARGETs are created there too, but are
6190 supposed to be sticky, so we copy them. */
6191 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
6192 if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
6194 if (GET_CODE (link) == EXPR_LIST)
6195 add_reg_note (new_rtx, REG_NOTE_KIND (link),
6196 copy_insn_1 (XEXP (link, 0)));
6197 else
6198 add_shallow_copy_of_reg_note (new_rtx, link);
6201 INSN_CODE (new_rtx) = INSN_CODE (insn);
6202 return new_rtx;
6205 static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
6206 rtx
6207 gen_hard_reg_clobber (machine_mode mode, unsigned int regno)
6209 if (hard_reg_clobbers[mode][regno])
6210 return hard_reg_clobbers[mode][regno];
6211 else
6212 return (hard_reg_clobbers[mode][regno] =
6213 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
6216 location_t prologue_location;
6217 location_t epilogue_location;
6219 /* Hold the current location information and the last location information,
6220 so that the data structures are built lazily, only when instructions at a
6221 given place are actually needed. */
6222 static location_t curr_location;
6224 /* Allocate insn location datastructure. */
6225 void
6226 insn_locations_init (void)
6228 prologue_location = epilogue_location = 0;
6229 curr_location = UNKNOWN_LOCATION;
6232 /* At the end of emit stage, clear current location. */
6233 void
6234 insn_locations_finalize (void)
6236 epilogue_location = curr_location;
6237 curr_location = UNKNOWN_LOCATION;
6240 /* Set current location. */
6241 void
6242 set_curr_insn_location (location_t location)
6244 curr_location = location;
6247 /* Get current location. */
6248 location_t
6249 curr_insn_location (void)
6251 return curr_location;
6254 /* Return the lexical scope block that INSN belongs to. */
6255 tree
6256 insn_scope (const rtx_insn *insn)
6258 return LOCATION_BLOCK (INSN_LOCATION (insn));
6261 /* Return line number of the statement that produced this insn. */
6262 int
6263 insn_line (const rtx_insn *insn)
6265 return LOCATION_LINE (INSN_LOCATION (insn));
6268 /* Return source file of the statement that produced this insn. */
6269 const char *
6270 insn_file (const rtx_insn *insn)
6272 return LOCATION_FILE (INSN_LOCATION (insn));
6275 /* Return expanded location of the statement that produced this insn. */
6276 expanded_location
6277 insn_location (const rtx_insn *insn)
6279 return expand_location (INSN_LOCATION (insn));
6282 /* Return true if memory model MODEL requires a pre-operation (release-style)
6283 barrier or a post-operation (acquire-style) barrier. While not universal,
6284 this function matches the behavior of several targets. */
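/* A sketch of how a target expander might consult this predicate
   (illustrative only; gen_memory_barrier stands for a hypothetical
   target barrier pattern):

     if (need_atomic_barrier_p (model, true))
       emit_insn (gen_memory_barrier ());
     ... emit the atomic operation itself ...
     if (need_atomic_barrier_p (model, false))
       emit_insn (gen_memory_barrier ());  */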
6286 bool
6287 need_atomic_barrier_p (enum memmodel model, bool pre)
6289 switch (model & MEMMODEL_MASK)
6291 case MEMMODEL_RELAXED:
6292 case MEMMODEL_CONSUME:
6293 return false;
6294 case MEMMODEL_RELEASE:
6295 return pre;
6296 case MEMMODEL_ACQUIRE:
6297 return !pre;
6298 case MEMMODEL_ACQ_REL:
6299 case MEMMODEL_SEQ_CST:
6300 return true;
6301 default:
6302 gcc_unreachable ();
6306 #include "gt-emit-rtl.h"