/* Emit RTL for the GCC expander.
   Copyright (C) 1987-2022 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


/* Middle-to-low level generation of rtx code and insns.

   This file contains support functions for creating rtl expressions
   and manipulating them in the doubly-linked chain of insns.

   The patterns of the insns are created by machine-dependent
   routines in insn-emit.cc, which is generated automatically from
   the machine description.  These routines make the individual rtx's
   of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
   which are automatically generated from rtl.def; what is machine
   dependent is the kind of rtx's they make and what arguments they
   use.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "memmodel.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "df.h"
#include "tm_p.h"
#include "stringpool.h"
#include "insn-config.h"
#include "regs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "varasm.h"
#include "cfgrtl.h"
#include "tree-eh.h"
#include "explow.h"
#include "expr.h"
#include "builtins.h"
#include "rtl-iter.h"
#include "stor-layout.h"
#include "opts.h"
#include "predict.h"
#include "rtx-vector-builder.h"
#include "gimple.h"
#include "gimple-ssa.h"
#include "gimplify.h"
struct target_rtl default_target_rtl;
#if SWITCHABLE_TARGET
struct target_rtl *this_target_rtl = &default_target_rtl;
#endif

#define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)

/* Commonly used modes.  */

scalar_int_mode byte_mode;	/* Mode whose width is BITS_PER_UNIT.  */
scalar_int_mode word_mode;	/* Mode whose width is BITS_PER_WORD.  */
scalar_int_mode ptr_mode;	/* Mode whose width is POINTER_SIZE.  */

/* Datastructures maintained for currently processed function in RTL form.  */

struct rtl_data x_rtl;

/* Indexed by pseudo register number, gives the rtx for that pseudo.
   Allocated in parallel with regno_pointer_align.
   FIXME: We could put it into emit_status struct, but gengtype is not able to deal
   with length attribute nested in top level structures.  */

rtx * regno_reg_rtx;

/* This is *not* reset after each function.  It gives each CODE_LABEL
   in the entire compilation a unique label number.  */

static GTY(()) int label_num = 1;

/* We record floating-point CONST_DOUBLEs in each floating-point mode for
   the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
   record a copy of const[012]_rtx and constm1_rtx.  CONSTM1_RTX
   is set only for MODE_INT and MODE_VECTOR_INT modes.  */

rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE];

rtx const_true_rtx;

REAL_VALUE_TYPE dconst0;
REAL_VALUE_TYPE dconst1;
REAL_VALUE_TYPE dconst2;
REAL_VALUE_TYPE dconstm1;
REAL_VALUE_TYPE dconsthalf;
REAL_VALUE_TYPE dconstinf;
REAL_VALUE_TYPE dconstninf;

/* Record fixed-point constant 0 and 1.  */
FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
FIXED_VALUE_TYPE fconst1[MAX_FCONST1];

/* We make one copy of (const_int C) where C is in
   [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
   to save space during the compilation and simplify comparisons of
   integers.  */

rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
/* Standard pieces of rtx, to be substituted directly into things.  */
rtx pc_rtx;
rtx ret_rtx;
rtx simple_return_rtx;

/* Marker used for denoting an INSN, which should never be accessed (i.e.,
   this pointer should normally never be dereferenced), but is required to be
   distinct from NULL_RTX.  Currently used by peephole2 pass.  */
rtx_insn *invalid_insn_rtx;

/* A hash table storing CONST_INTs whose absolute value is greater
   than MAX_SAVED_CONST_INT.  */

struct const_int_hasher : ggc_cache_ptr_hash<rtx_def>
{
  typedef HOST_WIDE_INT compare_type;

  static hashval_t hash (rtx i);
  static bool equal (rtx i, HOST_WIDE_INT h);
};

static GTY ((cache)) hash_table<const_int_hasher> *const_int_htab;

struct const_wide_int_hasher : ggc_cache_ptr_hash<rtx_def>
{
  static hashval_t hash (rtx x);
  static bool equal (rtx x, rtx y);
};

static GTY ((cache)) hash_table<const_wide_int_hasher> *const_wide_int_htab;

struct const_poly_int_hasher : ggc_cache_ptr_hash<rtx_def>
{
  typedef std::pair<machine_mode, poly_wide_int_ref> compare_type;

  static hashval_t hash (rtx x);
  static bool equal (rtx x, const compare_type &y);
};

static GTY ((cache)) hash_table<const_poly_int_hasher> *const_poly_int_htab;

/* A hash table storing register attribute structures.  */
struct reg_attr_hasher : ggc_cache_ptr_hash<reg_attrs>
{
  static hashval_t hash (reg_attrs *x);
  static bool equal (reg_attrs *a, reg_attrs *b);
};

static GTY ((cache)) hash_table<reg_attr_hasher> *reg_attrs_htab;

/* A hash table storing all CONST_DOUBLEs.  */
struct const_double_hasher : ggc_cache_ptr_hash<rtx_def>
{
  static hashval_t hash (rtx x);
  static bool equal (rtx x, rtx y);
};

static GTY ((cache)) hash_table<const_double_hasher> *const_double_htab;

/* A hash table storing all CONST_FIXEDs.  */
struct const_fixed_hasher : ggc_cache_ptr_hash<rtx_def>
{
  static hashval_t hash (rtx x);
  static bool equal (rtx x, rtx y);
};

static GTY ((cache)) hash_table<const_fixed_hasher> *const_fixed_htab;
#define cur_insn_uid (crtl->emit.x_cur_insn_uid)
#define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
#define first_label_num (crtl->emit.x_first_label_num)

static void set_used_decls (tree);
static void mark_label_nuses (rtx);
#if TARGET_SUPPORTS_WIDE_INT
static rtx lookup_const_wide_int (rtx);
#endif
static rtx lookup_const_double (rtx);
static rtx lookup_const_fixed (rtx);
static rtx gen_const_vector (machine_mode, int);
static void copy_rtx_if_shared_1 (rtx *orig);

/* Probability of the conditional branch currently being processed by
   try_split.  */
profile_probability split_branch_probability;
/* Returns a hash code for X (which is really a CONST_INT).  */

hashval_t
const_int_hasher::hash (rtx x)
{
  return (hashval_t) INTVAL (x);
}

/* Returns nonzero if the value represented by X (which is really a
   CONST_INT) is the same as that given by Y (which is really a
   HOST_WIDE_INT *).  */

bool
const_int_hasher::equal (rtx x, HOST_WIDE_INT y)
{
  return (INTVAL (x) == y);
}

#if TARGET_SUPPORTS_WIDE_INT
/* Returns a hash code for X (which is really a CONST_WIDE_INT).  */

hashval_t
const_wide_int_hasher::hash (rtx x)
{
  int i;
  unsigned HOST_WIDE_INT hash = 0;
  const_rtx xr = x;

  for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
    hash += CONST_WIDE_INT_ELT (xr, i);

  return (hashval_t) hash;
}

/* Returns nonzero if the value represented by X (which is really a
   CONST_WIDE_INT) is the same as that given by Y (which is really a
   CONST_WIDE_INT).  */

bool
const_wide_int_hasher::equal (rtx x, rtx y)
{
  int i;
  const_rtx xr = x;
  const_rtx yr = y;
  if (CONST_WIDE_INT_NUNITS (xr) != CONST_WIDE_INT_NUNITS (yr))
    return false;

  for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
    if (CONST_WIDE_INT_ELT (xr, i) != CONST_WIDE_INT_ELT (yr, i))
      return false;

  return true;
}
#endif
/* Returns a hash code for CONST_POLY_INT X.  */

hashval_t
const_poly_int_hasher::hash (rtx x)
{
  inchash::hash h;
  h.add_int (GET_MODE (x));
  for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
    h.add_wide_int (CONST_POLY_INT_COEFFS (x)[i]);
  return h.end ();
}

/* Returns nonzero if CONST_POLY_INT X is an rtx representation of Y.  */

bool
const_poly_int_hasher::equal (rtx x, const compare_type &y)
{
  if (GET_MODE (x) != y.first)
    return false;
  for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
    if (CONST_POLY_INT_COEFFS (x)[i] != y.second.coeffs[i])
      return false;
  return true;
}

/* Returns a hash code for X (which is really a CONST_DOUBLE).  */
hashval_t
const_double_hasher::hash (rtx x)
{
  const_rtx const value = x;
  hashval_t h;

  if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (value) == VOIDmode)
    h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
  else
    {
      h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
      /* MODE is used in the comparison, so it should be in the hash.  */
      h ^= GET_MODE (value);
    }
  return h;
}

/* Returns nonzero if the value represented by X (really a ...)
   is the same as that represented by Y (really a ...) */
bool
const_double_hasher::equal (rtx x, rtx y)
{
  const_rtx const a = x, b = y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (a) == VOIDmode)
    return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
	    && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
  else
    return real_identical (CONST_DOUBLE_REAL_VALUE (a),
			   CONST_DOUBLE_REAL_VALUE (b));
}

/* Returns a hash code for X (which is really a CONST_FIXED).  */

hashval_t
const_fixed_hasher::hash (rtx x)
{
  const_rtx const value = x;
  hashval_t h;

  h = fixed_hash (CONST_FIXED_VALUE (value));
  /* MODE is used in the comparison, so it should be in the hash.  */
  h ^= GET_MODE (value);
  return h;
}

/* Returns nonzero if the value represented by X is the same as that
   represented by Y.  */

bool
const_fixed_hasher::equal (rtx x, rtx y)
{
  const_rtx const a = x, b = y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
}
/* Return true if the given memory attributes are equal.  */

bool
mem_attrs_eq_p (const class mem_attrs *p, const class mem_attrs *q)
{
  if (p == q)
    return true;
  if (!p || !q)
    return false;
  return (p->alias == q->alias
	  && p->offset_known_p == q->offset_known_p
	  && (!p->offset_known_p || known_eq (p->offset, q->offset))
	  && p->size_known_p == q->size_known_p
	  && (!p->size_known_p || known_eq (p->size, q->size))
	  && p->align == q->align
	  && p->addrspace == q->addrspace
	  && (p->expr == q->expr
	      || (p->expr != NULL_TREE && q->expr != NULL_TREE
		  && operand_equal_p (p->expr, q->expr, 0))));
}

/* Set MEM's memory attributes so that they are the same as ATTRS.  */

static void
set_mem_attrs (rtx mem, mem_attrs *attrs)
{
  /* If everything is the default, we can just clear the attributes.  */
  if (mem_attrs_eq_p (attrs, mode_mem_attrs[(int) GET_MODE (mem)]))
    {
      MEM_ATTRS (mem) = 0;
      return;
    }

  if (!MEM_ATTRS (mem)
      || !mem_attrs_eq_p (attrs, MEM_ATTRS (mem)))
    {
      MEM_ATTRS (mem) = ggc_alloc<mem_attrs> ();
      memcpy (MEM_ATTRS (mem), attrs, sizeof (mem_attrs));
    }
}

/* Returns a hash code for X (which is really a reg_attrs *).  */

hashval_t
reg_attr_hasher::hash (reg_attrs *x)
{
  const reg_attrs *const p = x;

  inchash::hash h;
  h.add_ptr (p->decl);
  h.add_poly_hwi (p->offset);
  return h.end ();
}

/* Returns nonzero if the value represented by X is the same as that given by
   Y.  */

bool
reg_attr_hasher::equal (reg_attrs *x, reg_attrs *y)
{
  const reg_attrs *const p = x;
  const reg_attrs *const q = y;

  return (p->decl == q->decl && known_eq (p->offset, q->offset));
}

/* Allocate a new reg_attrs structure and insert it into the hash table if
   one identical to it is not already in the table.  We are doing this for
   MEM of mode MODE.  */

static reg_attrs *
get_reg_attrs (tree decl, poly_int64 offset)
{
  reg_attrs attrs;

  /* If everything is the default, we can just return zero.  */
  if (decl == 0 && known_eq (offset, 0))
    return 0;

  attrs.decl = decl;
  attrs.offset = offset;

  reg_attrs **slot = reg_attrs_htab->find_slot (&attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc<reg_attrs> ();
      memcpy (*slot, &attrs, sizeof (reg_attrs));
    }

  return *slot;
}
#if !HAVE_blockage
/* Generate an empty ASM_INPUT, which is used to block attempts to schedule,
   and to block register equivalences from being seen across this insn.  */

rtx
gen_blockage (void)
{
  rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
  MEM_VOLATILE_P (x) = true;
  return x;
}
#endif


/* Set the mode and register number of X to MODE and REGNO.  */

void
set_mode_and_regno (rtx x, machine_mode mode, unsigned int regno)
{
  unsigned int nregs = (HARD_REGISTER_NUM_P (regno)
			? hard_regno_nregs (regno, mode)
			: 1);
  PUT_MODE_RAW (x, mode);
  set_regno_raw (x, regno, nregs);
}

/* Initialize a fresh REG rtx with mode MODE and register REGNO.  */

rtx
init_raw_REG (rtx x, machine_mode mode, unsigned int regno)
{
  set_mode_and_regno (x, mode, regno);
  REG_ATTRS (x) = NULL;
  ORIGINAL_REGNO (x) = regno;
  return x;
}

/* Generate a new REG rtx.  Make sure ORIGINAL_REGNO is set properly, and
   don't attempt to share with the various global pieces of rtl (such as
   frame_pointer_rtx).  */

rtx
gen_raw_REG (machine_mode mode, unsigned int regno)
{
  rtx x = rtx_alloc (REG MEM_STAT_INFO);
  init_raw_REG (x, mode, regno);
  return x;
}

/* There are some RTL codes that require special attention; the generation
   functions do the raw handling.  If you add to this list, modify
   special_rtx in gengenrtl.cc as well.  */
rtx_expr_list *
gen_rtx_EXPR_LIST (machine_mode mode, rtx expr, rtx expr_list)
{
  return as_a <rtx_expr_list *> (gen_rtx_fmt_ee (EXPR_LIST, mode, expr,
						 expr_list));
}

rtx_insn_list *
gen_rtx_INSN_LIST (machine_mode mode, rtx insn, rtx insn_list)
{
  return as_a <rtx_insn_list *> (gen_rtx_fmt_ue (INSN_LIST, mode, insn,
						 insn_list));
}

rtx_insn *
gen_rtx_INSN (machine_mode mode, rtx_insn *prev_insn, rtx_insn *next_insn,
	      basic_block bb, rtx pattern, int location, int code,
	      rtx reg_notes)
{
  return as_a <rtx_insn *> (gen_rtx_fmt_uuBeiie (INSN, mode,
						 prev_insn, next_insn,
						 bb, pattern, location, code,
						 reg_notes));
}

rtx
gen_rtx_CONST_INT (machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
{
  if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
    return const_int_rtx[arg + MAX_SAVED_CONST_INT];

#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
  if (const_true_rtx && arg == STORE_FLAG_VALUE)
    return const_true_rtx;
#endif

  /* Look up the CONST_INT in the hash table.  */
  rtx *slot = const_int_htab->find_slot_with_hash (arg, (hashval_t) arg,
						   INSERT);
  if (*slot == 0)
    *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);

  return *slot;
}
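/* Because CONST_INTs are shared through the small-value cache above and
   through const_int_htab, two CONST_INTs with the same value are the same
   object, so (illustrative, not from the original comments)
   gen_rtx_CONST_INT (VOIDmode, 0) == const0_rtx and
   GEN_INT (42) == GEN_INT (42); pointer equality suffices to compare them.  */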
rtx
gen_int_mode (poly_int64 c, machine_mode mode)
{
  c = trunc_int_for_mode (c, mode);
  if (c.is_constant ())
    return GEN_INT (c.coeffs[0]);
  unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode));
  return immed_wide_int_const (poly_wide_int::from (c, prec, SIGNED), mode);
}
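/* Illustrative example: gen_int_mode (0xff, QImode) truncates and
   sign-extends the value to QImode and so yields the canonical
   (const_int -1), whereas a bare GEN_INT (0xff) would produce the
   non-canonical (const_int 255) for that mode.  */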
/* CONST_DOUBLEs might be created from pairs of integers, or from
   REAL_VALUE_TYPEs.  Also, their length is known only at run time,
   so we cannot use gen_rtx_raw_CONST_DOUBLE.  */

/* Determine whether REAL, a CONST_DOUBLE, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */

static rtx
lookup_const_double (rtx real)
{
  rtx *slot = const_double_htab->find_slot (real, INSERT);
  if (*slot == 0)
    *slot = real;

  return *slot;
}

/* Return a CONST_DOUBLE rtx for a floating-point value specified by
   VALUE in mode MODE.  */

rtx
const_double_from_real_value (REAL_VALUE_TYPE value, machine_mode mode)
{
  rtx real = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (real, mode);

  real->u.rv = value;

  return lookup_const_double (real);
}

/* Determine whether FIXED, a CONST_FIXED, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */

static rtx
lookup_const_fixed (rtx fixed)
{
  rtx *slot = const_fixed_htab->find_slot (fixed, INSERT);
  if (*slot == 0)
    *slot = fixed;

  return *slot;
}

/* Return a CONST_FIXED rtx for a fixed-point value specified by
   VALUE in mode MODE.  */

rtx
const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, machine_mode mode)
{
  rtx fixed = rtx_alloc (CONST_FIXED);
  PUT_MODE (fixed, mode);

  fixed->u.fv = value;

  return lookup_const_fixed (fixed);
}

#if TARGET_SUPPORTS_WIDE_INT == 0
/* Constructs double_int from rtx CST.  */

double_int
rtx_to_double_int (const_rtx cst)
{
  double_int r;

  if (CONST_INT_P (cst))
    r = double_int::from_shwi (INTVAL (cst));
  else if (CONST_DOUBLE_AS_INT_P (cst))
    {
      r.low = CONST_DOUBLE_LOW (cst);
      r.high = CONST_DOUBLE_HIGH (cst);
    }
  else
    gcc_unreachable ();

  return r;
}
#endif

#if TARGET_SUPPORTS_WIDE_INT
/* Determine whether CONST_WIDE_INT WINT already exists in the hash table.
   If so, return its counterpart; otherwise add it to the hash table and
   return it.  */

static rtx
lookup_const_wide_int (rtx wint)
{
  rtx *slot = const_wide_int_htab->find_slot (wint, INSERT);
  if (*slot == 0)
    *slot = wint;

  return *slot;
}
#endif
/* Return an rtx constant for V, given that the constant has mode MODE.
   The returned rtx will be a CONST_INT if V fits, otherwise it will be
   a CONST_DOUBLE (if !TARGET_SUPPORTS_WIDE_INT) or a CONST_WIDE_INT
   (if TARGET_SUPPORTS_WIDE_INT).  */

static rtx
immed_wide_int_const_1 (const wide_int_ref &v, machine_mode mode)
{
  unsigned int len = v.get_len ();
  /* Not scalar_int_mode because we also allow pointer bound modes.  */
  unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode));

  /* Allow truncation but not extension since we do not know if the
     number is signed or unsigned.  */
  gcc_assert (prec <= v.get_precision ());

  if (len < 2 || prec <= HOST_BITS_PER_WIDE_INT)
    return gen_int_mode (v.elt (0), mode);

#if TARGET_SUPPORTS_WIDE_INT
  {
    unsigned int i;
    rtx value;
    unsigned int blocks_needed
      = (prec + HOST_BITS_PER_WIDE_INT - 1) / HOST_BITS_PER_WIDE_INT;

    if (len > blocks_needed)
      len = blocks_needed;

    value = const_wide_int_alloc (len);

    /* It is so tempting to just put the mode in here.  Must control
       myself ... */
    PUT_MODE (value, VOIDmode);
    CWI_PUT_NUM_ELEM (value, len);

    for (i = 0; i < len; i++)
      CONST_WIDE_INT_ELT (value, i) = v.elt (i);

    return lookup_const_wide_int (value);
  }
#else
  return immed_double_const (v.elt (0), v.elt (1), mode);
#endif
}
#if TARGET_SUPPORTS_WIDE_INT == 0
/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
   of ints: I0 is the low-order word and I1 is the high-order word.
   For values that are larger than HOST_BITS_PER_DOUBLE_INT, the
   implied upper bits are copies of the high bit of i1.  The value
   itself is neither signed nor unsigned.  Do not use this routine for
   non-integer modes; convert to REAL_VALUE_TYPE and use
   const_double_from_real_value.  */

rtx
immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, machine_mode mode)
{
  rtx value;
  unsigned int i;

  /* There are the following cases (note that there are no modes with
     HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < HOST_BITS_PER_DOUBLE_INT):

     1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
	gen_int_mode.
     2) If the value of the integer fits into HOST_WIDE_INT anyway
	(i.e., i1 consists only from copies of the sign bit, and sign
	of i0 and i1 are the same), then we return a CONST_INT for i0.
     3) Otherwise, we create a CONST_DOUBLE for i0 and i1.  */
  scalar_mode smode;
  if (is_a <scalar_mode> (mode, &smode)
      && GET_MODE_BITSIZE (smode) <= HOST_BITS_PER_WIDE_INT)
    return gen_int_mode (i0, mode);

  /* If this integer fits in one word, return a CONST_INT.  */
  if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
    return GEN_INT (i0);

  /* We use VOIDmode for integers.  */
  value = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (value, VOIDmode);

  CONST_DOUBLE_LOW (value) = i0;
  CONST_DOUBLE_HIGH (value) = i1;

  for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
    XWINT (value, i) = 0;

  return lookup_const_double (value);
}
#endif
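/* Illustrative example for the !TARGET_SUPPORTS_WIDE_INT path above:
   immed_double_const (0, 1, TImode) denotes the 128-bit value 1 << 64,
   stored as a VOIDmode CONST_DOUBLE with CONST_DOUBLE_LOW == 0 and
   CONST_DOUBLE_HIGH == 1, while immed_double_const (5, 0, DImode) simply
   comes back as (const_int 5) via gen_int_mode.  */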
/* Return an rtx representation of C in mode MODE.  */

rtx
immed_wide_int_const (const poly_wide_int_ref &c, machine_mode mode)
{
  if (c.is_constant ())
    return immed_wide_int_const_1 (c.coeffs[0], mode);

  /* Not scalar_int_mode because we also allow pointer bound modes.  */
  unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode));

  /* Allow truncation but not extension since we do not know if the
     number is signed or unsigned.  */
  gcc_assert (prec <= c.coeffs[0].get_precision ());
  poly_wide_int newc = poly_wide_int::from (c, prec, SIGNED);

  /* See whether we already have an rtx for this constant.  */
  inchash::hash h;
  h.add_int (mode);
  for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
    h.add_wide_int (newc.coeffs[i]);
  const_poly_int_hasher::compare_type typed_value (mode, newc);
  rtx *slot = const_poly_int_htab->find_slot_with_hash (typed_value,
							h.end (), INSERT);
  rtx x = *slot;
  if (x)
    return x;

  /* Create a new rtx.  There's a choice to be made here between installing
     the actual mode of the rtx or leaving it as VOIDmode (for consistency
     with CONST_INT).  In practice the handling of the codes is different
     enough that we get no benefit from using VOIDmode, and various places
     assume that VOIDmode implies CONST_INT.  Using the real mode seems like
     the right long-term direction anyway.  */
  typedef trailing_wide_ints<NUM_POLY_INT_COEFFS> twi;
  size_t extra_size = twi::extra_size (prec);
  x = rtx_alloc_v (CONST_POLY_INT,
		   sizeof (struct const_poly_int_def) + extra_size);
  PUT_MODE (x, mode);
  CONST_POLY_INT_COEFFS (x).set_precision (prec);
  for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
    CONST_POLY_INT_COEFFS (x)[i] = newc.coeffs[i];

  *slot = x;
  return x;
}
rtx
gen_rtx_REG (machine_mode mode, unsigned int regno)
{
  /* In case the MD file explicitly references the frame pointer, have
     all such references point to the same frame pointer.  This is
     used during frame pointer elimination to distinguish the explicit
     references to these registers from pseudos that happened to be
     assigned to them.

     If we have eliminated the frame pointer or arg pointer, we will
     be using it as a normal register, for example as a spill
     register.  In such cases, we might be accessing it in a mode that
     is not Pmode and therefore cannot use the pre-allocated rtx.

     Also don't do this when we are making new REGs in reload, since
     we don't want to get confused with the real pointers.  */

  if (mode == Pmode && !reload_in_progress && !lra_in_progress)
    {
      if (regno == FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return frame_pointer_rtx;

      if (!HARD_FRAME_POINTER_IS_FRAME_POINTER
	  && regno == HARD_FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return hard_frame_pointer_rtx;
#if !HARD_FRAME_POINTER_IS_ARG_POINTER
      if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
	  && regno == ARG_POINTER_REGNUM)
	return arg_pointer_rtx;
#endif
#ifdef RETURN_ADDRESS_POINTER_REGNUM
      if (regno == RETURN_ADDRESS_POINTER_REGNUM)
	return return_address_pointer_rtx;
#endif
      if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
	  && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	return pic_offset_table_rtx;
      if (regno == STACK_POINTER_REGNUM)
	return stack_pointer_rtx;
    }

#if 0
  /* If the per-function register table has been set up, try to re-use
     an existing entry in that table to avoid useless generation of RTL.

     This code is disabled for now until we can fix the various backends
     which depend on having non-shared hard registers in some cases.   Long
     term we want to re-enable this code as it can significantly cut down
     on the amount of useless RTL that gets generated.

     We'll also need to fix some code that runs after reload that wants to
     set ORIGINAL_REGNO.  */

  if (cfun
      && cfun->emit
      && regno_reg_rtx
      && regno < FIRST_PSEUDO_REGISTER
      && reg_raw_mode[regno] == mode)
    return regno_reg_rtx[regno];
#endif

  return gen_raw_REG (mode, regno);
}
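/* Note that, outside of reload and LRA, the sharing above means that a
   request such as gen_rtx_REG (Pmode, STACK_POINTER_REGNUM) hands back the
   pre-allocated stack_pointer_rtx itself rather than a fresh REG, so such
   references can be compared with pointer equality (illustrative
   observation, not part of the original comments).  */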
rtx
gen_rtx_MEM (machine_mode mode, rtx addr)
{
  rtx rt = gen_rtx_raw_MEM (mode, addr);

  /* This field is not cleared by the mere allocation of the rtx, so
     we clear it here.  */
  MEM_ATTRS (rt) = 0;

  return rt;
}

/* Generate a memory referring to non-trapping constant memory.  */

rtx
gen_const_mem (machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_READONLY_P (mem) = 1;
  MEM_NOTRAP_P (mem) = 1;
  return mem;
}

/* Generate a MEM referring to fixed portions of the frame, e.g., register
   save areas.  */

rtx
gen_frame_mem (machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}

/* Generate a MEM referring to a temporary use of the stack, not part
   of the fixed stack frame.  For example, something which is pushed
   by a target splitter.  */

rtx
gen_tmp_stack_mem (machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  if (!cfun->calls_alloca)
    set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}
/* We want to create (subreg:OMODE (obj:IMODE) OFFSET).  Return true if
   this construct would be valid, and false otherwise.  */

bool
validate_subreg (machine_mode omode, machine_mode imode,
		 const_rtx reg, poly_uint64 offset)
{
  poly_uint64 isize = GET_MODE_SIZE (imode);
  poly_uint64 osize = GET_MODE_SIZE (omode);

  /* The sizes must be ordered, so that we know whether the subreg
     is partial, paradoxical or complete.  */
  if (!ordered_p (isize, osize))
    return false;

  /* All subregs must be aligned.  */
  if (!multiple_p (offset, osize))
    return false;

  /* The subreg offset cannot be outside the inner object.  */
  if (maybe_ge (offset, isize))
    return false;

  poly_uint64 regsize = REGMODE_NATURAL_SIZE (imode);

  /* ??? This should not be here.  Temporarily continue to allow word_mode
     subregs of anything.  The most common offender is (subreg:SI (reg:DF)).
     Generally, backends are doing something sketchy but it'll take time to
     fix them all.  */
  if (omode == word_mode)
    ;
  /* ??? Similarly, e.g. with (subreg:DF (reg:TI)).  Though store_bit_field
     is the culprit here, and not the backends.  */
  else if (known_ge (osize, regsize) && known_ge (isize, osize))
    ;
  /* Allow component subregs of complex and vector.  Though given the below
     extraction rules, it's not always clear what that means.  */
  else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	   && GET_MODE_INNER (imode) == omode)
    ;
  /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
     i.e. (subreg:V4SF (reg:SF) 0) or (subreg:V4SF (reg:V2SF) 0).  This
     surely isn't the cleanest way to represent this.  It's questionable
     if this ought to be represented at all -- why can't this all be hidden
     in post-reload splitters that make arbitrary mode changes to the
     registers themselves.  */
  else if (VECTOR_MODE_P (omode)
	   && GET_MODE_INNER (omode) == GET_MODE_INNER (imode))
    ;
  /* Subregs involving floating point modes are not allowed to
     change size unless it's an insert into a complex mode.
     Therefore (subreg:DI (reg:DF) 0) and (subreg:CS (reg:SF) 0) are fine, but
     (subreg:SI (reg:DF) 0) isn't.  */
  else if ((FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
	   && !COMPLEX_MODE_P (omode))
    {
      if (! (known_eq (isize, osize)
	     /* LRA can use subreg to store a floating point value in
		an integer mode.  Although the floating point and the
		integer modes need the same number of hard registers,
		the size of the floating point mode can be less than the
		integer mode.  LRA also uses subregs when a register
		needs to be used in different modes in one insn.  */
	     || lra_in_progress))
	return false;
    }

  /* Paradoxical subregs must have offset zero.  */
  if (maybe_gt (osize, isize))
    return known_eq (offset, 0U);

  /* This is a normal subreg.  Verify that the offset is representable.  */

  /* For hard registers, we already have most of these rules collected in
     subreg_offset_representable_p.  */
  if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
    {
      unsigned int regno = REGNO (reg);

      if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	  && GET_MODE_INNER (imode) == omode)
	;
      else if (!REG_CAN_CHANGE_MODE_P (regno, imode, omode))
	return false;

      return subreg_offset_representable_p (regno, imode, offset, omode);
    }

  /* The outer size must be ordered wrt the register size, otherwise
     we wouldn't know at compile time how many registers the outer
     mode occupies.  */
  if (!ordered_p (osize, regsize))
    return false;

  /* For pseudo registers, we want most of the same checks.  Namely:

     Assume that the pseudo register will be allocated to hard registers
     that can hold REGSIZE bytes each.  If OSIZE is not a multiple of REGSIZE,
     the remainder must correspond to the lowpart of the containing hard
     register.  If BYTES_BIG_ENDIAN, the lowpart is at the highest offset,
     otherwise it is at the lowest offset.

     Given that we've already checked the mode and offset alignment,
     we only have to check subblock subregs here.  */
  if (maybe_lt (osize, regsize)
      && ! (lra_in_progress && (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))))
    {
      /* It is invalid for the target to pick a register size for a mode
	 that isn't ordered wrt to the size of that mode.  */
      poly_uint64 block_size = ordered_min (isize, regsize);
      unsigned int start_reg;
      poly_uint64 offset_within_reg;
      if (!can_div_trunc_p (offset, block_size, &start_reg, &offset_within_reg)
	  || (BYTES_BIG_ENDIAN
	      ? maybe_ne (offset_within_reg, block_size - osize)
	      : maybe_ne (offset_within_reg, 0U)))
	return false;
    }
  return true;
}
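/* A few illustrative cases for the rules above, assuming SImode is 4 bytes
   and DImode is 8 bytes:

     (subreg:SI (reg:DI) 2) -- rejected: 2 is not a multiple of the outer
			       size, so the alignment check fails.
     (subreg:SI (reg:DI) 8) -- rejected: the offset lies outside the inner
			       DImode object.
     (subreg:SI (reg:DI) 0) -- accepted on a little-endian target; on a
			       64-bit big-endian target the subblock check
			       instead requires offset 4 for the lowpart.  */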
rtx
gen_rtx_SUBREG (machine_mode mode, rtx reg, poly_uint64 offset)
{
  gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
  return gen_rtx_raw_SUBREG (mode, reg, offset);
}

/* Generate a SUBREG representing the least-significant part of REG if MODE
   is smaller than mode of REG, otherwise paradoxical SUBREG.  */

rtx
gen_lowpart_SUBREG (machine_mode mode, rtx reg)
{
  machine_mode inmode;

  inmode = GET_MODE (reg);
  if (inmode == VOIDmode)
    inmode = mode;
  return gen_rtx_SUBREG (mode, reg,
			 subreg_lowpart_offset (mode, inmode));
}

rtx
gen_rtx_VAR_LOCATION (machine_mode mode, tree decl, rtx loc,
		      enum var_init_status status)
{
  rtx x = gen_rtx_fmt_te (VAR_LOCATION, mode, decl, loc);
  PAT_VAR_LOCATION_STATUS (x) = status;
  return x;
}
/* Create an rtvec and store within it the RTXen passed in the arguments.  */

rtvec
gen_rtvec (int n, ...)
{
  int i;
  rtvec rt_val;
  va_list p;

  va_start (p, n);

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    {
      va_end (p);
      return NULL_RTVEC;
    }

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = va_arg (p, rtx);

  va_end (p);
  return rt_val;
}
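/* Typical use (illustrative): building the body of a PARALLEL from two
   existing patterns, e.g.

     rtvec v = gen_rtvec (2, set, clobber);
     rtx par = gen_rtx_PARALLEL (VOIDmode, v);

   where SET and CLOBBER stand for previously constructed rtxes.  */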
rtvec
gen_rtvec_v (int n, rtx *argp)
{
  int i;
  rtvec rt_val;

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    return NULL_RTVEC;

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}

rtvec
gen_rtvec_v (int n, rtx_insn **argp)
{
  int i;
  rtvec rt_val;

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    return NULL_RTVEC;

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}
/* Return the number of bytes between the start of an OUTER_MODE
   in-memory value and the start of an INNER_MODE in-memory value,
   given that the former is a lowpart of the latter.  It may be a
   paradoxical lowpart, in which case the offset will be negative
   on big-endian targets.  */

poly_int64
byte_lowpart_offset (machine_mode outer_mode,
		     machine_mode inner_mode)
{
  if (paradoxical_subreg_p (outer_mode, inner_mode))
    return -subreg_lowpart_offset (inner_mode, outer_mode);
  else
    return subreg_lowpart_offset (outer_mode, inner_mode);
}
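/* Illustrative values, with 4-byte SImode and 8-byte DImode:
   byte_lowpart_offset (SImode, DImode) is 0 on a little-endian target and
   4 when both BYTES_BIG_ENDIAN and WORDS_BIG_ENDIAN are set; in the
   paradoxical direction, byte_lowpart_offset (DImode, SImode) is 0 on
   little-endian and -4 on such a big-endian target.  */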
/* Return the offset of (subreg:OUTER_MODE (mem:INNER_MODE X) OFFSET)
   from address X.  For paradoxical big-endian subregs this is a
   negative value, otherwise it's the same as OFFSET.  */

poly_int64
subreg_memory_offset (machine_mode outer_mode, machine_mode inner_mode,
		      poly_uint64 offset)
{
  if (paradoxical_subreg_p (outer_mode, inner_mode))
    {
      gcc_assert (known_eq (offset, 0U));
      return -subreg_lowpart_offset (inner_mode, outer_mode);
    }
  return offset;
}

/* As above, but return the offset that existing subreg X would have
   if SUBREG_REG (X) were stored in memory.  The only significant thing
   about the current SUBREG_REG is its mode.  */

poly_int64
subreg_memory_offset (const_rtx x)
{
  return subreg_memory_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)),
			       SUBREG_BYTE (x));
}
/* Generate a REG rtx for a new pseudo register of mode MODE.
   This pseudo is assigned the next sequential register number.  */

rtx
gen_reg_rtx (machine_mode mode)
{
  rtx val;
  unsigned int align = GET_MODE_ALIGNMENT (mode);

  gcc_assert (can_create_pseudo_p ());

  /* If a virtual register with bigger mode alignment is generated,
     increase stack alignment estimation because it might be spilled
     to stack later.  */
  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align
      && !crtl->stack_realign_processed)
    {
      unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
      if (crtl->stack_alignment_estimated < min_align)
	crtl->stack_alignment_estimated = min_align;
    }

  if (generating_concat_p
      && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
    {
      /* For complex modes, don't make a single pseudo.
	 Instead, make a CONCAT of two pseudos.
	 This allows noncontiguous allocation of the real and imaginary parts,
	 which makes much better code.  Besides, allocating DCmode
	 pseudos overstrains reload on some machines like the 386.  */
      rtx realpart, imagpart;
      machine_mode partmode = GET_MODE_INNER (mode);

      realpart = gen_reg_rtx (partmode);
      imagpart = gen_reg_rtx (partmode);
      return gen_rtx_CONCAT (mode, realpart, imagpart);
    }

  /* Do not call gen_reg_rtx with uninitialized crtl.  */
  gcc_assert (crtl->emit.regno_pointer_align_length);

  crtl->emit.ensure_regno_capacity ();
  gcc_assert (reg_rtx_no < crtl->emit.regno_pointer_align_length);

  val = gen_raw_REG (mode, reg_rtx_no);
  regno_reg_rtx[reg_rtx_no++] = val;
  return val;
}
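/* So, while generating_concat_p is set, a request like gen_reg_rtx (DCmode)
   does not allocate a single DCmode pseudo but rather something like
   (concat:DC (reg:DF 100) (reg:DF 101)), with the two parts free to end up
   in unrelated hard registers (illustrative example; the register numbers
   are made up).  */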
/* Make sure m_regno_pointer_align, and regno_reg_rtx are large
   enough to have elements in the range 0 <= idx <= reg_rtx_no.  */

void
emit_status::ensure_regno_capacity ()
{
  int old_size = regno_pointer_align_length;

  if (reg_rtx_no < old_size)
    return;

  int new_size = old_size * 2;
  while (reg_rtx_no >= new_size)
    new_size *= 2;

  char *tmp = XRESIZEVEC (char, regno_pointer_align, new_size);
  memset (tmp + old_size, 0, new_size - old_size);
  regno_pointer_align = (unsigned char *) tmp;

  rtx *new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, new_size);
  memset (new1 + old_size, 0, (new_size - old_size) * sizeof (rtx));
  regno_reg_rtx = new1;

  crtl->emit.regno_pointer_align_length = new_size;
}

/* Return TRUE if REG is a PARM_DECL, FALSE otherwise.  */

bool
reg_is_parm_p (rtx reg)
{
  tree decl;

  gcc_assert (REG_P (reg));
  decl = REG_EXPR (reg);
  return (decl && TREE_CODE (decl) == PARM_DECL);
}
/* Update NEW with the same attributes as REG, but with OFFSET added
   to the REG_OFFSET.  */

static void
update_reg_offset (rtx new_rtx, rtx reg, poly_int64 offset)
{
  REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
				       REG_OFFSET (reg) + offset);
}

/* Generate a register with same attributes as REG, but with OFFSET
   added to the REG_OFFSET.  */

rtx
gen_rtx_REG_offset (rtx reg, machine_mode mode, unsigned int regno,
		    poly_int64 offset)
{
  /* Use gen_raw_REG rather than gen_rtx_REG, because otherwise we'd
     overwrite REG_ATTRS (and in the callers often ORIGINAL_REGNO too)
     of the shared REG rtxes like stack_pointer_rtx etc.  This should
     happen only for SUBREGs from DEBUG_INSNs, RA should ensure
     multi-word registers don't overlap the special registers like
     stack pointer.  */
  rtx new_rtx = gen_raw_REG (mode, regno);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Generate a new pseudo-register with the same attributes as REG, but
   with OFFSET added to the REG_OFFSET.  */

rtx
gen_reg_rtx_offset (rtx reg, machine_mode mode, int offset)
{
  rtx new_rtx = gen_reg_rtx (mode);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Adjust REG in-place so that it has mode MODE.  It is assumed that the
   new register is a (possibly paradoxical) lowpart of the old one.  */

void
adjust_reg_mode (rtx reg, machine_mode mode)
{
  update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
  PUT_MODE (reg, mode);
}
/* Copy REG's attributes from X, if X has any attributes.  If REG and X
   have different modes, REG is a (possibly paradoxical) lowpart of X.  */

void
set_reg_attrs_from_value (rtx reg, rtx x)
{
  poly_int64 offset;
  bool can_be_reg_pointer = true;

  /* Don't call mark_reg_pointer for incompatible pointer sign
     extension.  */
  while (GET_CODE (x) == SIGN_EXTEND
	 || GET_CODE (x) == ZERO_EXTEND
	 || GET_CODE (x) == TRUNCATE
	 || (GET_CODE (x) == SUBREG && subreg_lowpart_p (x)))
    {
#if defined(POINTERS_EXTEND_UNSIGNED)
      if (((GET_CODE (x) == SIGN_EXTEND && POINTERS_EXTEND_UNSIGNED)
	   || (GET_CODE (x) == ZERO_EXTEND && ! POINTERS_EXTEND_UNSIGNED)
	   || (paradoxical_subreg_p (x)
	       && ! (SUBREG_PROMOTED_VAR_P (x)
		     && SUBREG_CHECK_PROMOTED_SIGN (x,
						    POINTERS_EXTEND_UNSIGNED))))
	  && !targetm.have_ptr_extend ())
	can_be_reg_pointer = false;
#endif
      x = XEXP (x, 0);
    }

  /* Hard registers can be reused for multiple purposes within the same
     function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
     on them is wrong.  */
  if (HARD_REGISTER_P (reg))
    return;

  offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
  if (MEM_P (x))
    {
      if (MEM_OFFSET_KNOWN_P (x))
	REG_ATTRS (reg) = get_reg_attrs (MEM_EXPR (x),
					 MEM_OFFSET (x) + offset);
      if (can_be_reg_pointer && MEM_POINTER (x))
	mark_reg_pointer (reg, 0);
    }
  else if (REG_P (x))
    {
      if (REG_ATTRS (x))
	update_reg_offset (reg, x, offset);
      if (can_be_reg_pointer && REG_POINTER (x))
	mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
    }
}

/* Generate a REG rtx for a new pseudo register, copying the mode
   and attributes from X.  */

rtx
gen_reg_rtx_and_attrs (rtx x)
{
  rtx reg = gen_reg_rtx (GET_MODE (x));
  set_reg_attrs_from_value (reg, x);
  return reg;
}
/* Set the register attributes for registers contained in PARM_RTX.
   Use needed values from memory attributes of MEM.  */

void
set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
{
  if (REG_P (parm_rtx))
    set_reg_attrs_from_value (parm_rtx, mem);
  else if (GET_CODE (parm_rtx) == PARALLEL)
    {
      /* Check for a NULL entry in the first slot, used to indicate that the
	 parameter goes both on the stack and in registers.  */
      int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
      for (; i < XVECLEN (parm_rtx, 0); i++)
	{
	  rtx x = XVECEXP (parm_rtx, 0, i);
	  if (REG_P (XEXP (x, 0)))
	    REG_ATTRS (XEXP (x, 0))
	      = get_reg_attrs (MEM_EXPR (mem),
			       INTVAL (XEXP (x, 1)));
	}
    }
}

/* Set the REG_ATTRS for registers in value X, given that X represents
   decl T.  */

void
set_reg_attrs_for_decl_rtl (tree t, rtx x)
{
  if (!t)
    return;
  tree tdecl = t;
  if (GET_CODE (x) == SUBREG)
    {
      gcc_assert (subreg_lowpart_p (x));
      x = SUBREG_REG (x);
    }
  if (REG_P (x))
    REG_ATTRS (x)
      = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
					       DECL_P (tdecl)
					       ? DECL_MODE (tdecl)
					       : TYPE_MODE (TREE_TYPE (tdecl))));
  if (GET_CODE (x) == CONCAT)
    {
      if (REG_P (XEXP (x, 0)))
	REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
      if (REG_P (XEXP (x, 1)))
	REG_ATTRS (XEXP (x, 1))
	  = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int i, start;

      /* Check for a NULL entry, used to indicate that the parameter goes
	 both on the stack and in registers.  */
      if (XEXP (XVECEXP (x, 0, 0), 0))
	start = 0;
      else
	start = 1;

      for (i = start; i < XVECLEN (x, 0); i++)
	{
	  rtx y = XVECEXP (x, 0, i);
	  if (REG_P (XEXP (y, 0)))
	    REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
	}
    }
}
/* Assign the RTX X to declaration T.  */

void
set_decl_rtl (tree t, rtx x)
{
  DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
  if (x)
    set_reg_attrs_for_decl_rtl (t, x);
}

/* Assign the RTX X to parameter declaration T.  BY_REFERENCE_P is true
   if the ABI requires the parameter to be passed by reference.  */

void
set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
{
  DECL_INCOMING_RTL (t) = x;
  if (x && !by_reference_p)
    set_reg_attrs_for_decl_rtl (t, x);
}

/* Identify REG (which may be a CONCAT) as a user register.  */

void
mark_user_reg (rtx reg)
{
  if (GET_CODE (reg) == CONCAT)
    {
      REG_USERVAR_P (XEXP (reg, 0)) = 1;
      REG_USERVAR_P (XEXP (reg, 1)) = 1;
    }
  else
    {
      gcc_assert (REG_P (reg));
      REG_USERVAR_P (reg) = 1;
    }
}

/* Identify REG as a probable pointer register and show its alignment
   as ALIGN, if nonzero.  */

void
mark_reg_pointer (rtx reg, int align)
{
  if (! REG_POINTER (reg))
    {
      REG_POINTER (reg) = 1;

      if (align)
	REGNO_POINTER_ALIGN (REGNO (reg)) = align;
    }
  else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
    /* We can no longer be sure just how aligned this pointer is.  */
    REGNO_POINTER_ALIGN (REGNO (reg)) = align;
}
/* Return 1 plus largest pseudo reg number used in the current function.  */

int
max_reg_num (void)
{
  return reg_rtx_no;
}

/* Return 1 + the largest label number used so far in the current function.  */

int
max_label_num (void)
{
  return label_num;
}

/* Return first label number used in this function (if any were used).  */

int
get_first_label_num (void)
{
  return first_label_num;
}

/* If the rtx for label was created during the expansion of a nested
   function, then first_label_num won't include this label number.
   Fix this now so that array indices work later.  */

void
maybe_set_first_label_num (rtx_code_label *x)
{
  if (CODE_LABEL_NUMBER (x) < first_label_num)
    first_label_num = CODE_LABEL_NUMBER (x);
}

/* For use by the RTL function loader, when mingling with normal
   functions.
   Ensure that label_num is greater than the label num of X, to avoid
   duplicate labels in the generated assembler.  */

void
maybe_set_max_label_num (rtx_code_label *x)
{
  if (CODE_LABEL_NUMBER (x) >= label_num)
    label_num = CODE_LABEL_NUMBER (x) + 1;
}
/* Return a value representing some low-order bits of X, where the number
   of low-order bits is given by MODE.  Note that no conversion is done
   between floating-point and fixed-point values, rather, the bit
   representation is returned.

   This function handles the cases in common between gen_lowpart, below,
   and two variants in cse.cc and combine.cc.  These are the cases that can
   be safely handled at all points in the compilation.

   If this is not a case we can handle, return 0.  */

rtx
gen_lowpart_common (machine_mode mode, rtx x)
{
  poly_uint64 msize = GET_MODE_SIZE (mode);
  machine_mode innermode;

  /* Unfortunately, this routine doesn't take a parameter for the mode of X,
     so we have to make one up.  Yuk.  */
  innermode = GET_MODE (x);
  if (CONST_INT_P (x)
      && known_le (msize * BITS_PER_UNIT,
		   (unsigned HOST_WIDE_INT) HOST_BITS_PER_WIDE_INT))
    innermode = int_mode_for_size (HOST_BITS_PER_WIDE_INT, 0).require ();
  else if (innermode == VOIDmode)
    innermode = int_mode_for_size (HOST_BITS_PER_DOUBLE_INT, 0).require ();

  gcc_assert (innermode != VOIDmode && innermode != BLKmode);

  if (innermode == mode)
    return x;

  /* The size of the outer and inner modes must be ordered.  */
  poly_uint64 xsize = GET_MODE_SIZE (innermode);
  if (!ordered_p (msize, xsize))
    return 0;

  if (SCALAR_FLOAT_MODE_P (mode))
    {
      /* Don't allow paradoxical FLOAT_MODE subregs.  */
      if (maybe_gt (msize, xsize))
	return 0;
    }
  else
    {
      /* MODE must occupy no more of the underlying registers than X.  */
      poly_uint64 regsize = REGMODE_NATURAL_SIZE (innermode);
      unsigned int mregs, xregs;
      if (!can_div_away_from_zero_p (msize, regsize, &mregs)
	  || !can_div_away_from_zero_p (xsize, regsize, &xregs)
	  || mregs > xregs)
	return 0;
    }

  scalar_int_mode int_mode, int_innermode, from_mode;
  if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
      && is_a <scalar_int_mode> (mode, &int_mode)
      && is_a <scalar_int_mode> (innermode, &int_innermode)
      && is_a <scalar_int_mode> (GET_MODE (XEXP (x, 0)), &from_mode))
    {
      /* If we are getting the low-order part of something that has been
	 sign- or zero-extended, we can either just use the object being
	 extended or make a narrower extension.  If we want an even smaller
	 piece than the size of the object being extended, call ourselves
	 recursively.

	 This case is used mostly by combine and cse.  */

      if (from_mode == int_mode)
	return XEXP (x, 0);
      else if (GET_MODE_SIZE (int_mode) < GET_MODE_SIZE (from_mode))
	return gen_lowpart_common (int_mode, XEXP (x, 0));
      else if (GET_MODE_SIZE (int_mode) < GET_MODE_SIZE (int_innermode))
	return gen_rtx_fmt_e (GET_CODE (x), int_mode, XEXP (x, 0));
    }
  else if (GET_CODE (x) == SUBREG || REG_P (x)
	   || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
	   || CONST_DOUBLE_AS_FLOAT_P (x) || CONST_SCALAR_INT_P (x)
	   || CONST_POLY_INT_P (x))
    return lowpart_subreg (mode, x, innermode);

  /* Otherwise, we can't do this.  */
  return 0;
}
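/* Two illustrative cases of the extension handling above: the SImode
   lowpart of (zero_extend:DI (reg:SI X)) is just (reg:SI X), while asking
   for the HImode lowpart of the same expression recurses and returns the
   HImode lowpart of (reg:SI X) itself.  */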
rtx
gen_highpart (machine_mode mode, rtx x)
{
  poly_uint64 msize = GET_MODE_SIZE (mode);
  rtx result;

  /* This case loses if X is a subreg.  To catch bugs early,
     complain if an invalid MODE is used even in other cases.  */
  gcc_assert (known_le (msize, (unsigned int) UNITS_PER_WORD)
	      || known_eq (msize, GET_MODE_UNIT_SIZE (GET_MODE (x))));

  /* gen_lowpart_common handles a lot of special cases due to needing to handle
     paradoxical subregs; it only calls simplify_gen_subreg when certain that
     it will produce something meaningful.  The only case we need to handle
     specially here is MEM.  */
  if (MEM_P (x))
    {
      poly_int64 offset = subreg_highpart_offset (mode, GET_MODE (x));
      return adjust_address (x, mode, offset);
    }

  result = simplify_gen_subreg (mode, x, GET_MODE (x),
				subreg_highpart_offset (mode, GET_MODE (x)));
  /* Since we handle MEM directly above, we should never get a MEM back
     from simplify_gen_subreg.  */
  gcc_assert (result && !MEM_P (result));

  return result;
}

/* Like gen_highpart, but accept mode of EXP operand in case EXP can
   be VOIDmode constant.  */

rtx
gen_highpart_mode (machine_mode outermode, machine_mode innermode, rtx exp)
{
  if (GET_MODE (exp) != VOIDmode)
    {
      gcc_assert (GET_MODE (exp) == innermode);
      return gen_highpart (outermode, exp);
    }
  return simplify_gen_subreg (outermode, exp, innermode,
			      subreg_highpart_offset (outermode, innermode));
}
/* Return the SUBREG_BYTE for a lowpart subreg whose outer mode has
   OUTER_BYTES bytes and whose inner mode has INNER_BYTES bytes.  */

poly_uint64
subreg_size_lowpart_offset (poly_uint64 outer_bytes, poly_uint64 inner_bytes)
{
  gcc_checking_assert (ordered_p (outer_bytes, inner_bytes));
  if (maybe_gt (outer_bytes, inner_bytes))
    /* Paradoxical subregs always have a SUBREG_BYTE of 0.  */
    return 0;

  if (BYTES_BIG_ENDIAN && WORDS_BIG_ENDIAN)
    return inner_bytes - outer_bytes;
  else if (!BYTES_BIG_ENDIAN && !WORDS_BIG_ENDIAN)
    return 0;
  else
    return subreg_size_offset_from_lsb (outer_bytes, inner_bytes, 0);
}

/* Return the SUBREG_BYTE for a highpart subreg whose outer mode has
   OUTER_BYTES bytes and whose inner mode has INNER_BYTES bytes.  */

poly_uint64
subreg_size_highpart_offset (poly_uint64 outer_bytes, poly_uint64 inner_bytes)
{
  gcc_assert (known_ge (inner_bytes, outer_bytes));

  if (BYTES_BIG_ENDIAN && WORDS_BIG_ENDIAN)
    return 0;
  else if (!BYTES_BIG_ENDIAN && !WORDS_BIG_ENDIAN)
    return inner_bytes - outer_bytes;
  else
    return subreg_size_offset_from_lsb (outer_bytes, inner_bytes,
					(inner_bytes - outer_bytes)
					* BITS_PER_UNIT);
}
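/* Illustrative values, for a 4-byte part of an 8-byte whole:
   subreg_size_lowpart_offset (4, 8) is 0 on a little-endian target and 4
   when both BYTES_BIG_ENDIAN and WORDS_BIG_ENDIAN are set, while
   subreg_size_highpart_offset (4, 8) is the mirror image: 4 on
   little-endian and 0 on such a big-endian target.  */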
/* Return 1 iff X, assumed to be a SUBREG,
   refers to the least significant part of its containing reg.
   If X is not a SUBREG, always return 1 (it is its own low part!).  */

int
subreg_lowpart_p (const_rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return 1;
  else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
    return 0;

  return known_eq (subreg_lowpart_offset (GET_MODE (x),
					  GET_MODE (SUBREG_REG (x))),
		   SUBREG_BYTE (x));
}

/* Return subword OFFSET of operand OP.
   The word number, OFFSET, is interpreted as the word number starting
   at the low-order address.  OFFSET 0 is the low-order word if not
   WORDS_BIG_ENDIAN, otherwise it is the high-order word.

   If we cannot extract the required word, we return zero.  Otherwise,
   an rtx corresponding to the requested word will be returned.

   VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
   reload has completed, a valid address will always be returned.  After
   reload, if a valid address cannot be returned, we return zero.

   If VALIDATE_ADDRESS is zero, we simply form the required address; validating
   it is the responsibility of the caller.

   MODE is the mode of OP in case it is a CONST_INT.

   ??? This is still rather broken for some cases.  The problem for the
   moment is that all callers of this thing provide no 'goal mode' to
   tell us to work with.  This exists because all callers were written
   in a word based SUBREG world.
   Now use of this function can be deprecated by simplify_subreg in most
   cases.  */

rtx
operand_subword (rtx op, poly_uint64 offset, int validate_address,
		 machine_mode mode)
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  gcc_assert (mode != VOIDmode);

  /* If OP is narrower than a word, fail.  */
  if (mode != BLKmode
      && maybe_lt (GET_MODE_SIZE (mode), UNITS_PER_WORD))
    return 0;

  /* If we want a word outside OP, return zero.  */
  if (mode != BLKmode
      && maybe_gt ((offset + 1) * UNITS_PER_WORD, GET_MODE_SIZE (mode)))
    return const0_rtx;

  /* Form a new MEM at the requested address.  */
  if (MEM_P (op))
    {
      rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);

      if (! validate_address)
	return new_rtx;

      else if (reload_completed)
	{
	  if (! strict_memory_address_addr_space_p (word_mode,
						    XEXP (new_rtx, 0),
						    MEM_ADDR_SPACE (op)))
	    return 0;
	}
      else
	return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
    }

  /* Rest can be handled by simplify_subreg.  */
  return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
}
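/* Illustrative example, on a 32-bit little-endian target where word_mode
   is SImode: for a DImode pseudo R, operand_subword (R, 0, 1, DImode)
   yields (subreg:SI (reg:DI R) 0) and operand_subword (R, 1, 1, DImode)
   yields (subreg:SI (reg:DI R) 4), both via simplify_gen_subreg.  */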
/* Similar to `operand_subword', but never return 0.  If we can't
   extract the required subword, put OP into a register and try again.
   The second attempt must succeed.  We always validate the address in
   this case.

   MODE is the mode of OP, in case it is CONST_INT.  */

rtx
operand_subword_force (rtx op, poly_uint64 offset, machine_mode mode)
{
  rtx result = operand_subword (op, offset, 1, mode);

  if (result)
    return result;

  if (mode != BLKmode && mode != VOIDmode)
    {
      /* If this is a register which cannot be accessed by words, copy it
	 to a pseudo register.  */
      if (REG_P (op))
	op = copy_to_reg (op);
      else
	op = force_reg (mode, op);
    }

  result = operand_subword (op, offset, 1, mode);
  gcc_assert (result);

  return result;
}

mem_attrs::mem_attrs ()
  : expr (NULL_TREE),
    offset (0),
    size (0),
    alias (0),
    align (0),
    addrspace (ADDR_SPACE_GENERIC),
    offset_known_p (false),
    size_known_p (false)
{}

/* Returns 1 if the two MEM_EXPRs can be considered equal and 0 otherwise.  */

int
mem_expr_equal_p (const_tree expr1, const_tree expr2)
{
  if (expr1 == expr2)
    return 1;

  if (! expr1 || ! expr2)
    return 0;

  if (TREE_CODE (expr1) != TREE_CODE (expr2))
    return 0;

  return operand_equal_p (expr1, expr2, 0);
}
1852 /* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
1853 bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
1854 -1 if not known. */
1857 get_mem_align_offset (rtx mem, unsigned int align)
1859 tree expr;
1860 poly_uint64 offset;
1862 /* This function can't use
1863 if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem)
1864 || (MAX (MEM_ALIGN (mem),
1865 MAX (align, get_object_alignment (MEM_EXPR (mem))))
1866 < align))
1867 return -1;
1868 else
1869 return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1);
1870 for two reasons:
1871 - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
1872 for <variable>. get_inner_reference doesn't handle it and
1873 even if it did, the alignment in that case needs to be determined
1874 from DECL_FIELD_CONTEXT's TYPE_ALIGN.
1875 - it would do suboptimal job for COMPONENT_REFs, even if MEM_EXPR
1876 isn't sufficiently aligned, the object it is in might be. */
1877 gcc_assert (MEM_P (mem));
1878 expr = MEM_EXPR (mem);
1879 if (expr == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
1880 return -1;
1882 offset = MEM_OFFSET (mem);
1883 if (DECL_P (expr))
1885 if (DECL_ALIGN (expr) < align)
1886 return -1;
1888 else if (INDIRECT_REF_P (expr))
1890 if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
1891 return -1;
1893 else if (TREE_CODE (expr) == COMPONENT_REF)
1895 while (1)
1897 tree inner = TREE_OPERAND (expr, 0);
1898 tree field = TREE_OPERAND (expr, 1);
1899 tree byte_offset = component_ref_field_offset (expr);
1900 tree bit_offset = DECL_FIELD_BIT_OFFSET (field);
1902 poly_uint64 suboffset;
1903 if (!byte_offset
1904 || !poly_int_tree_p (byte_offset, &suboffset)
1905 || !tree_fits_uhwi_p (bit_offset))
1906 return -1;
1908 offset += suboffset;
1909 offset += tree_to_uhwi (bit_offset) / BITS_PER_UNIT;
1911 if (inner == NULL_TREE)
1913 if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
1914 < (unsigned int) align)
1915 return -1;
1916 break;
1918 else if (DECL_P (inner))
1920 if (DECL_ALIGN (inner) < align)
1921 return -1;
1922 break;
1924 else if (TREE_CODE (inner) != COMPONENT_REF)
1925 return -1;
1926 expr = inner;
1929 else
1930 return -1;
1932 HOST_WIDE_INT misalign;
1933 if (!known_misalignment (offset, align / BITS_PER_UNIT, &misalign))
1934 return -1;
1935 return misalign;
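/* Example (illustrative sketch): if MEM_EXPR (mem) is a decl with
   DECL_ALIGN of 64 bits and MEM_OFFSET (mem) is 6, then
   get_mem_align_offset (mem, 64) returns 6, because the address minus 6
   is known to be 8-byte aligned.  Asking for 128-bit alignment returns
   -1, since the enclosing object is not known to be 16-byte aligned.  */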
1938 /* Given REF (a MEM) and T, either the type of X or the expression
1939 corresponding to REF, set the memory attributes. OBJECTP is nonzero
1940 if we are making a new object of this type. BITPOS is nonzero if
1941 there is an offset outstanding on T that will be applied later. */
1943 void
1944 set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
1945 poly_int64 bitpos)
1947 poly_int64 apply_bitpos = 0;
1948 tree type;
1949 class mem_attrs attrs, *defattrs, *refattrs;
1950 addr_space_t as;
1952 /* It can happen that type_for_mode was given a mode for which there
1953 is no language-level type. In which case it returns NULL, which
1954 we can see here. */
1955 if (t == NULL_TREE)
1956 return;
1958 type = TYPE_P (t) ? t : TREE_TYPE (t);
1959 if (type == error_mark_node)
1960 return;
1962 /* If we have already set DECL_RTL = ref, get_alias_set will get the
1963 wrong answer, as it assumes that DECL_RTL already has the right alias
1964 info. Callers should not set DECL_RTL until after the call to
1965 set_mem_attributes. */
1966 gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));
1968 /* Get the alias set from the expression or type (perhaps using a
1969 front-end routine) and use it. */
1970 attrs.alias = get_alias_set (t);
1972 MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
1973 MEM_POINTER (ref) = POINTER_TYPE_P (type);
1975 /* Default values from pre-existing memory attributes if present. */
1976 refattrs = MEM_ATTRS (ref);
1977 if (refattrs)
1979 /* ??? Can this ever happen? Calling this routine on a MEM that
1980 already carries memory attributes should probably be invalid. */
1981 attrs.expr = refattrs->expr;
1982 attrs.offset_known_p = refattrs->offset_known_p;
1983 attrs.offset = refattrs->offset;
1984 attrs.size_known_p = refattrs->size_known_p;
1985 attrs.size = refattrs->size;
1986 attrs.align = refattrs->align;
1989 /* Otherwise, default values from the mode of the MEM reference. */
1990 else
1992 defattrs = mode_mem_attrs[(int) GET_MODE (ref)];
1993 gcc_assert (!defattrs->expr);
1994 gcc_assert (!defattrs->offset_known_p);
1996 /* Respect mode size. */
1997 attrs.size_known_p = defattrs->size_known_p;
1998 attrs.size = defattrs->size;
1999 /* ??? Is this really necessary? We probably should always get
2000 the size from the type below. */
2002 /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
2003 if T is an object, always compute the object alignment below. */
2004 if (TYPE_P (t))
2005 attrs.align = defattrs->align;
2006 else
2007 attrs.align = BITS_PER_UNIT;
2008 /* ??? If T is a type, respecting mode alignment may *also* be wrong
2009 e.g. if the type carries an alignment attribute. Should we be
2010 able to simply always use TYPE_ALIGN? */
2013 /* We can set the alignment from the type if we are making an object or if
2014 this is an INDIRECT_REF. */
2015 if (objectp || TREE_CODE (t) == INDIRECT_REF)
2016 attrs.align = MAX (attrs.align, TYPE_ALIGN (type));
2018 /* If the size is known, we can set that. */
2019 tree new_size = TYPE_SIZE_UNIT (type);
2021 /* The address-space is that of the type. */
2022 as = TYPE_ADDR_SPACE (type);
2024 /* If T is not a type, we may be able to deduce some more information about
2025 the expression. */
2026 if (! TYPE_P (t))
2028 tree base;
2030 if (TREE_THIS_VOLATILE (t))
2031 MEM_VOLATILE_P (ref) = 1;
2033 /* Now remove any conversions: they don't change what the underlying
2034 object is. Likewise for SAVE_EXPR. */
2035 while (CONVERT_EXPR_P (t)
2036 || TREE_CODE (t) == VIEW_CONVERT_EXPR
2037 || TREE_CODE (t) == SAVE_EXPR)
2038 t = TREE_OPERAND (t, 0);
2040 /* Note whether this expression can trap. */
2041 MEM_NOTRAP_P (ref) = !tree_could_trap_p (t);
2043 base = get_base_address (t);
2044 if (base)
2046 if (DECL_P (base)
2047 && TREE_READONLY (base)
2048 && (TREE_STATIC (base) || DECL_EXTERNAL (base))
2049 && !TREE_THIS_VOLATILE (base))
2050 MEM_READONLY_P (ref) = 1;
2052 /* Mark static const strings readonly as well. */
2053 if (TREE_CODE (base) == STRING_CST
2054 && TREE_READONLY (base)
2055 && TREE_STATIC (base))
2056 MEM_READONLY_P (ref) = 1;
2058 /* Address-space information is on the base object. */
2059 if (TREE_CODE (base) == MEM_REF
2060 || TREE_CODE (base) == TARGET_MEM_REF)
2061 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (base,
2062 0))));
2063 else
2064 as = TYPE_ADDR_SPACE (TREE_TYPE (base));
 2067 /* If this expression uses its parent's alias set, mark it such
2068 that we won't change it. */
2069 if (component_uses_parent_alias_set_from (t) != NULL_TREE)
2070 MEM_KEEP_ALIAS_SET_P (ref) = 1;
2072 /* If this is a decl, set the attributes of the MEM from it. */
2073 if (DECL_P (t))
2075 attrs.expr = t;
2076 attrs.offset_known_p = true;
2077 attrs.offset = 0;
2078 apply_bitpos = bitpos;
2079 new_size = DECL_SIZE_UNIT (t);
2082 /* ??? If we end up with a constant or a descriptor do not
2083 record a MEM_EXPR. */
2084 else if (CONSTANT_CLASS_P (t)
2085 || TREE_CODE (t) == CONSTRUCTOR)
2088 /* If this is a field reference, record it. */
2089 else if (TREE_CODE (t) == COMPONENT_REF)
2091 attrs.expr = t;
2092 attrs.offset_known_p = true;
2093 attrs.offset = 0;
2094 apply_bitpos = bitpos;
2095 if (DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
2096 new_size = DECL_SIZE_UNIT (TREE_OPERAND (t, 1));
2099 /* Else record it. */
2100 else
2102 gcc_assert (handled_component_p (t)
2103 || TREE_CODE (t) == MEM_REF
2104 || TREE_CODE (t) == TARGET_MEM_REF);
2105 attrs.expr = t;
2106 attrs.offset_known_p = true;
2107 attrs.offset = 0;
2108 apply_bitpos = bitpos;
2111 /* If this is a reference based on a partitioned decl replace the
2112 base with a MEM_REF of the pointer representative we created
2113 during stack slot partitioning. */
2114 if (attrs.expr
2115 && VAR_P (base)
2116 && ! is_global_var (base)
2117 && cfun->gimple_df->decls_to_pointers != NULL)
2119 tree *namep = cfun->gimple_df->decls_to_pointers->get (base);
2120 if (namep)
2122 attrs.expr = unshare_expr (attrs.expr);
2123 tree *orig_base = &attrs.expr;
2124 while (handled_component_p (*orig_base))
2125 orig_base = &TREE_OPERAND (*orig_base, 0);
2126 tree aptrt = reference_alias_ptr_type (*orig_base);
2127 *orig_base = build2 (MEM_REF, TREE_TYPE (*orig_base), *namep,
2128 build_int_cst (aptrt, 0));
2132 /* Compute the alignment. */
2133 unsigned int obj_align;
2134 unsigned HOST_WIDE_INT obj_bitpos;
2135 get_object_alignment_1 (t, &obj_align, &obj_bitpos);
2136 unsigned int diff_align = known_alignment (obj_bitpos - bitpos);
2137 if (diff_align != 0)
2138 obj_align = MIN (obj_align, diff_align);
2139 attrs.align = MAX (attrs.align, obj_align);
2142 poly_uint64 const_size;
2143 if (poly_int_tree_p (new_size, &const_size))
2145 attrs.size_known_p = true;
2146 attrs.size = const_size;
2149 /* If we modified OFFSET based on T, then subtract the outstanding
2150 bit position offset. Similarly, increase the size of the accessed
2151 object to contain the negative offset. */
2152 if (maybe_ne (apply_bitpos, 0))
2154 gcc_assert (attrs.offset_known_p);
2155 poly_int64 bytepos = bits_to_bytes_round_down (apply_bitpos);
2156 attrs.offset -= bytepos;
2157 if (attrs.size_known_p)
2158 attrs.size += bytepos;
2161 /* Now set the attributes we computed above. */
2162 attrs.addrspace = as;
2163 set_mem_attrs (ref, &attrs);
2166 void
2167 set_mem_attributes (rtx ref, tree t, int objectp)
2169 set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
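/* Example (illustrative sketch, with placeholder names MODE, ADDR and EXP):
   a typical expansion-time use is to attach attributes derived from a
   tree node EXP to a freshly built memory reference:

     rtx mem = gen_rtx_MEM (mode, addr);
     set_mem_attributes (mem, exp, 1);

   with OBJECTP nonzero because MEM addresses the whole object; the alias
   set, alignment, MEM_EXPR and offset/size are filled in from EXP as
   described above.  */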
2172 /* Set the alias set of MEM to SET. */
2174 void
2175 set_mem_alias_set (rtx mem, alias_set_type set)
2177 /* If the new and old alias sets don't conflict, something is wrong. */
2178 gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
2179 mem_attrs attrs (*get_mem_attrs (mem));
2180 attrs.alias = set;
2181 set_mem_attrs (mem, &attrs);
2184 /* Set the address space of MEM to ADDRSPACE (target-defined). */
2186 void
2187 set_mem_addr_space (rtx mem, addr_space_t addrspace)
2189 mem_attrs attrs (*get_mem_attrs (mem));
2190 attrs.addrspace = addrspace;
2191 set_mem_attrs (mem, &attrs);
2194 /* Set the alignment of MEM to ALIGN bits. */
2196 void
2197 set_mem_align (rtx mem, unsigned int align)
2199 mem_attrs attrs (*get_mem_attrs (mem));
2200 attrs.align = align;
2201 set_mem_attrs (mem, &attrs);
2204 /* Set the expr for MEM to EXPR. */
2206 void
2207 set_mem_expr (rtx mem, tree expr)
2209 mem_attrs attrs (*get_mem_attrs (mem));
2210 attrs.expr = expr;
2211 set_mem_attrs (mem, &attrs);
2214 /* Set the offset of MEM to OFFSET. */
2216 void
2217 set_mem_offset (rtx mem, poly_int64 offset)
2219 mem_attrs attrs (*get_mem_attrs (mem));
2220 attrs.offset_known_p = true;
2221 attrs.offset = offset;
2222 set_mem_attrs (mem, &attrs);
2225 /* Clear the offset of MEM. */
2227 void
2228 clear_mem_offset (rtx mem)
2230 mem_attrs attrs (*get_mem_attrs (mem));
2231 attrs.offset_known_p = false;
2232 set_mem_attrs (mem, &attrs);
2235 /* Set the size of MEM to SIZE. */
2237 void
2238 set_mem_size (rtx mem, poly_int64 size)
2240 mem_attrs attrs (*get_mem_attrs (mem));
2241 attrs.size_known_p = true;
2242 attrs.size = size;
2243 set_mem_attrs (mem, &attrs);
2246 /* Clear the size of MEM. */
2248 void
2249 clear_mem_size (rtx mem)
2251 mem_attrs attrs (*get_mem_attrs (mem));
2252 attrs.size_known_p = false;
2253 set_mem_attrs (mem, &attrs);
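/* Example (illustrative sketch): each setter above copies the current
   attribute block, changes one field and installs the copy, so callers
   simply assert facts they know to hold:

     set_mem_align (mem, 32);
     set_mem_size (mem, 8);

   declares MEM to be 4-byte aligned and 8 bytes in size; the MEM_ATTRS
   structure itself is never modified in place, only replaced.  */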
2256 /* Return a memory reference like MEMREF, but with its mode changed to MODE
2257 and its address changed to ADDR. (VOIDmode means don't change the mode.
2258 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
2259 returned memory location is required to be valid. INPLACE is true if any
2260 changes can be made directly to MEMREF or false if MEMREF must be treated
2261 as immutable.
2263 The memory attributes are not changed. */
2265 static rtx
2266 change_address_1 (rtx memref, machine_mode mode, rtx addr, int validate,
2267 bool inplace)
2269 addr_space_t as;
2270 rtx new_rtx;
2272 gcc_assert (MEM_P (memref));
2273 as = MEM_ADDR_SPACE (memref);
2274 if (mode == VOIDmode)
2275 mode = GET_MODE (memref);
2276 if (addr == 0)
2277 addr = XEXP (memref, 0);
2278 if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
2279 && (!validate || memory_address_addr_space_p (mode, addr, as)))
2280 return memref;
 2282 /* Don't validate the address for LRA. LRA can make the address valid
 2283 by itself in the most efficient way. */
2284 if (validate && !lra_in_progress)
2286 if (reload_in_progress || reload_completed)
2287 gcc_assert (memory_address_addr_space_p (mode, addr, as));
2288 else
2289 addr = memory_address_addr_space (mode, addr, as);
2292 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
2293 return memref;
2295 if (inplace)
2297 XEXP (memref, 0) = addr;
2298 return memref;
2301 new_rtx = gen_rtx_MEM (mode, addr);
2302 MEM_COPY_ATTRIBUTES (new_rtx, memref);
2303 return new_rtx;
2306 /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
2307 way we are changing MEMREF, so we only preserve the alias set. */
2310 change_address (rtx memref, machine_mode mode, rtx addr)
2312 rtx new_rtx = change_address_1 (memref, mode, addr, 1, false);
2313 machine_mode mmode = GET_MODE (new_rtx);
2314 class mem_attrs *defattrs;
2316 mem_attrs attrs (*get_mem_attrs (memref));
2317 defattrs = mode_mem_attrs[(int) mmode];
2318 attrs.expr = NULL_TREE;
2319 attrs.offset_known_p = false;
2320 attrs.size_known_p = defattrs->size_known_p;
2321 attrs.size = defattrs->size;
2322 attrs.align = defattrs->align;
2324 /* If there are no changes, just return the original memory reference. */
2325 if (new_rtx == memref)
2327 if (mem_attrs_eq_p (get_mem_attrs (memref), &attrs))
2328 return new_rtx;
2330 new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
2331 MEM_COPY_ATTRIBUTES (new_rtx, memref);
2334 set_mem_attrs (new_rtx, &attrs);
2335 return new_rtx;
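/* Example (illustrative sketch): reinterpreting a BLKmode reference as a
   single SImode word at address ADDR:

     rtx word = change_address (blk_mem, SImode, addr);

   Only the alias set of BLK_MEM survives; the expression, offset and
   size attributes are dropped, as described above.  */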
2338 /* Return a memory reference like MEMREF, but with its mode changed
2339 to MODE and its address offset by OFFSET bytes. If VALIDATE is
2340 nonzero, the memory address is forced to be valid.
2341 If ADJUST_ADDRESS is zero, OFFSET is only used to update MEM_ATTRS
 2342 and the caller is responsible for adjusting the MEMREF base register.
2343 If ADJUST_OBJECT is zero, the underlying object associated with the
2344 memory reference is left unchanged and the caller is responsible for
2345 dealing with it. Otherwise, if the new memory reference is outside
2346 the underlying object, even partially, then the object is dropped.
2347 SIZE, if nonzero, is the size of an access in cases where MODE
2348 has no inherent size. */
2351 adjust_address_1 (rtx memref, machine_mode mode, poly_int64 offset,
2352 int validate, int adjust_address, int adjust_object,
2353 poly_int64 size)
2355 rtx addr = XEXP (memref, 0);
2356 rtx new_rtx;
2357 scalar_int_mode address_mode;
2358 class mem_attrs attrs (*get_mem_attrs (memref)), *defattrs;
2359 unsigned HOST_WIDE_INT max_align;
2360 #ifdef POINTERS_EXTEND_UNSIGNED
2361 scalar_int_mode pointer_mode
2362 = targetm.addr_space.pointer_mode (attrs.addrspace);
2363 #endif
2365 /* VOIDmode means no mode change for change_address_1. */
2366 if (mode == VOIDmode)
2367 mode = GET_MODE (memref);
2369 /* Take the size of non-BLKmode accesses from the mode. */
2370 defattrs = mode_mem_attrs[(int) mode];
2371 if (defattrs->size_known_p)
2372 size = defattrs->size;
2374 /* If there are no changes, just return the original memory reference. */
2375 if (mode == GET_MODE (memref)
2376 && known_eq (offset, 0)
2377 && (known_eq (size, 0)
2378 || (attrs.size_known_p && known_eq (attrs.size, size)))
2379 && (!validate || memory_address_addr_space_p (mode, addr,
2380 attrs.addrspace)))
2381 return memref;
2383 /* ??? Prefer to create garbage instead of creating shared rtl.
2384 This may happen even if offset is nonzero -- consider
2385 (plus (plus reg reg) const_int) -- so do this always. */
2386 addr = copy_rtx (addr);
2388 /* Convert a possibly large offset to a signed value within the
2389 range of the target address space. */
2390 address_mode = get_address_mode (memref);
2391 offset = trunc_int_for_mode (offset, address_mode);
2393 if (adjust_address)
2395 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
2396 object, we can merge it into the LO_SUM. */
2397 if (GET_MODE (memref) != BLKmode
2398 && GET_CODE (addr) == LO_SUM
2399 && known_in_range_p (offset,
2400 0, (GET_MODE_ALIGNMENT (GET_MODE (memref))
2401 / BITS_PER_UNIT)))
2402 addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
2403 plus_constant (address_mode,
2404 XEXP (addr, 1), offset));
2405 #ifdef POINTERS_EXTEND_UNSIGNED
2406 /* If MEMREF is a ZERO_EXTEND from pointer_mode and the offset is valid
2407 in that mode, we merge it into the ZERO_EXTEND. We take advantage of
2408 the fact that pointers are not allowed to overflow. */
2409 else if (POINTERS_EXTEND_UNSIGNED > 0
2410 && GET_CODE (addr) == ZERO_EXTEND
2411 && GET_MODE (XEXP (addr, 0)) == pointer_mode
2412 && known_eq (trunc_int_for_mode (offset, pointer_mode), offset))
2413 addr = gen_rtx_ZERO_EXTEND (address_mode,
2414 plus_constant (pointer_mode,
2415 XEXP (addr, 0), offset));
2416 #endif
2417 else
2418 addr = plus_constant (address_mode, addr, offset);
2421 new_rtx = change_address_1 (memref, mode, addr, validate, false);
2423 /* If the address is a REG, change_address_1 rightfully returns memref,
2424 but this would destroy memref's MEM_ATTRS. */
2425 if (new_rtx == memref && maybe_ne (offset, 0))
2426 new_rtx = copy_rtx (new_rtx);
2428 /* Conservatively drop the object if we don't know where we start from. */
2429 if (adjust_object && (!attrs.offset_known_p || !attrs.size_known_p))
2431 attrs.expr = NULL_TREE;
2432 attrs.alias = 0;
2435 /* Compute the new values of the memory attributes due to this adjustment.
2436 We add the offsets and update the alignment. */
2437 if (attrs.offset_known_p)
2439 attrs.offset += offset;
2441 /* Drop the object if the new left end is not within its bounds. */
2442 if (adjust_object && maybe_lt (attrs.offset, 0))
2444 attrs.expr = NULL_TREE;
2445 attrs.alias = 0;
2449 /* Compute the new alignment by taking the MIN of the alignment and the
2450 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
 2451 is zero. */
2452 if (maybe_ne (offset, 0))
2454 max_align = known_alignment (offset) * BITS_PER_UNIT;
2455 attrs.align = MIN (attrs.align, max_align);
2458 if (maybe_ne (size, 0))
2460 /* Drop the object if the new right end is not within its bounds. */
2461 if (adjust_object && maybe_gt (offset + size, attrs.size))
2463 attrs.expr = NULL_TREE;
2464 attrs.alias = 0;
2466 attrs.size_known_p = true;
2467 attrs.size = size;
2469 else if (attrs.size_known_p)
2471 gcc_assert (!adjust_object);
2472 attrs.size -= offset;
2473 /* ??? The store_by_pieces machinery generates negative sizes,
2474 so don't assert for that here. */
2477 set_mem_attrs (new_rtx, &attrs);
2479 return new_rtx;
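/* Example (illustrative sketch): accessing the high SImode word of a
   little-endian DImode memory operand through the usual adjust_address
   wrapper, which calls adjust_address_1 with VALIDATE and ADJUST_ADDRESS
   nonzero:

     rtx hi = adjust_address (di_mem, SImode, 4);

   The returned MEM has its address offset by 4 bytes and its offset,
   size and alignment attributes updated accordingly.  */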
2482 /* Return a memory reference like MEMREF, but with its mode changed
2483 to MODE and its address changed to ADDR, which is assumed to be
2484 MEMREF offset by OFFSET bytes. If VALIDATE is
2485 nonzero, the memory address is forced to be valid. */
2488 adjust_automodify_address_1 (rtx memref, machine_mode mode, rtx addr,
2489 poly_int64 offset, int validate)
2491 memref = change_address_1 (memref, VOIDmode, addr, validate, false);
2492 return adjust_address_1 (memref, mode, offset, validate, 0, 0, 0);
2495 /* Return a memory reference like MEMREF, but whose address is changed by
2496 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2497 known to be in OFFSET (possibly 1). */
2500 offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
2502 rtx new_rtx, addr = XEXP (memref, 0);
2503 machine_mode address_mode;
2504 class mem_attrs *defattrs;
2506 mem_attrs attrs (*get_mem_attrs (memref));
2507 address_mode = get_address_mode (memref);
2508 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2510 /* At this point we don't know _why_ the address is invalid. It
2511 could have secondary memory references, multiplies or anything.
2513 However, if we did go and rearrange things, we can wind up not
2514 being able to recognize the magic around pic_offset_table_rtx.
2515 This stuff is fragile, and is yet another example of why it is
2516 bad to expose PIC machinery too early. */
2517 if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx,
2518 attrs.addrspace)
2519 && GET_CODE (addr) == PLUS
2520 && XEXP (addr, 0) == pic_offset_table_rtx)
2522 addr = force_reg (GET_MODE (addr), addr);
2523 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2526 update_temp_slot_address (XEXP (memref, 0), new_rtx);
2527 new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1, false);
2529 /* If there are no changes, just return the original memory reference. */
2530 if (new_rtx == memref)
2531 return new_rtx;
2533 /* Update the alignment to reflect the offset. Reset the offset, which
2534 we don't know. */
2535 defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
2536 attrs.offset_known_p = false;
2537 attrs.size_known_p = defattrs->size_known_p;
2538 attrs.size = defattrs->size;
2539 attrs.align = MIN (attrs.align, pow2 * BITS_PER_UNIT);
2540 set_mem_attrs (new_rtx, &attrs);
2541 return new_rtx;
2544 /* Return a memory reference like MEMREF, but with its address changed to
2545 ADDR. The caller is asserting that the actual piece of memory pointed
2546 to is the same, just the form of the address is being changed, such as
2547 by putting something into a register. INPLACE is true if any changes
2548 can be made directly to MEMREF or false if MEMREF must be treated as
2549 immutable. */
2552 replace_equiv_address (rtx memref, rtx addr, bool inplace)
2554 /* change_address_1 copies the memory attribute structure without change
2555 and that's exactly what we want here. */
2556 update_temp_slot_address (XEXP (memref, 0), addr);
2557 return change_address_1 (memref, VOIDmode, addr, 1, inplace);
2560 /* Likewise, but the reference is not required to be valid. */
2563 replace_equiv_address_nv (rtx memref, rtx addr, bool inplace)
2565 return change_address_1 (memref, VOIDmode, addr, 0, inplace);
2568 /* Return a memory reference like MEMREF, but with its mode widened to
2569 MODE and offset by OFFSET. This would be used by targets that e.g.
2570 cannot issue QImode memory operations and have to use SImode memory
2571 operations plus masking logic. */
2574 widen_memory_access (rtx memref, machine_mode mode, poly_int64 offset)
2576 rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1, 0, 0);
2577 poly_uint64 size = GET_MODE_SIZE (mode);
2579 /* If there are no changes, just return the original memory reference. */
2580 if (new_rtx == memref)
2581 return new_rtx;
2583 mem_attrs attrs (*get_mem_attrs (new_rtx));
2585 /* If we don't know what offset we were at within the expression, then
2586 we can't know if we've overstepped the bounds. */
2587 if (! attrs.offset_known_p)
2588 attrs.expr = NULL_TREE;
2590 while (attrs.expr)
2592 if (TREE_CODE (attrs.expr) == COMPONENT_REF)
2594 tree field = TREE_OPERAND (attrs.expr, 1);
2595 tree offset = component_ref_field_offset (attrs.expr);
2597 if (! DECL_SIZE_UNIT (field))
2599 attrs.expr = NULL_TREE;
2600 break;
2603 /* Is the field at least as large as the access? If so, ok,
2604 otherwise strip back to the containing structure. */
2605 if (poly_int_tree_p (DECL_SIZE_UNIT (field))
2606 && known_ge (wi::to_poly_offset (DECL_SIZE_UNIT (field)), size)
2607 && known_ge (attrs.offset, 0))
2608 break;
2610 poly_uint64 suboffset;
2611 if (!poly_int_tree_p (offset, &suboffset))
2613 attrs.expr = NULL_TREE;
2614 break;
2617 attrs.expr = TREE_OPERAND (attrs.expr, 0);
2618 attrs.offset += suboffset;
2619 attrs.offset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
2620 / BITS_PER_UNIT);
2622 /* Similarly for the decl. */
2623 else if (DECL_P (attrs.expr)
2624 && DECL_SIZE_UNIT (attrs.expr)
2625 && poly_int_tree_p (DECL_SIZE_UNIT (attrs.expr))
2626 && known_ge (wi::to_poly_offset (DECL_SIZE_UNIT (attrs.expr)),
2627 size)
2628 && known_ge (attrs.offset, 0))
2629 break;
2630 else
2632 /* The widened memory access overflows the expression, which means
2633 that it could alias another expression. Zap it. */
2634 attrs.expr = NULL_TREE;
2635 break;
2639 if (! attrs.expr)
2640 attrs.offset_known_p = false;
2642 /* The widened memory may alias other stuff, so zap the alias set. */
2643 /* ??? Maybe use get_alias_set on any remaining expression. */
2644 attrs.alias = 0;
2645 attrs.size_known_p = true;
2646 attrs.size = size;
2647 set_mem_attrs (new_rtx, &attrs);
2648 return new_rtx;
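/* Example (illustrative sketch): a target without byte loads can widen a
   QImode reference so that a full word is loaded and then masked:

     rtx wide = widen_memory_access (byte_mem, SImode, 0);

   The result is GET_MODE_SIZE (SImode) bytes wide, its alias set is
   cleared, and its MEM_EXPR is dropped whenever the wider access might
   overrun the original object.  */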
2651 /* A fake decl that is used as the MEM_EXPR of spill slots. */
2652 static GTY(()) tree spill_slot_decl;
2654 tree
2655 get_spill_slot_decl (bool force_build_p)
2657 tree d = spill_slot_decl;
2658 rtx rd;
2660 if (d || !force_build_p)
2661 return d;
2663 d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
2664 VAR_DECL, get_identifier ("%sfp"), void_type_node);
2665 DECL_ARTIFICIAL (d) = 1;
2666 DECL_IGNORED_P (d) = 1;
2667 TREE_USED (d) = 1;
2668 spill_slot_decl = d;
2670 rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
2671 MEM_NOTRAP_P (rd) = 1;
2672 mem_attrs attrs (*mode_mem_attrs[(int) BLKmode]);
2673 attrs.alias = new_alias_set ();
2674 attrs.expr = d;
2675 set_mem_attrs (rd, &attrs);
2676 SET_DECL_RTL (d, rd);
2678 return d;
2681 /* Given MEM, a result from assign_stack_local, fill in the memory
2682 attributes as appropriate for a register allocator spill slot.
2683 These slots are not aliasable by other memory. We arrange for
2684 them all to use a single MEM_EXPR, so that the aliasing code can
2685 work properly in the case of shared spill slots. */
2687 void
2688 set_mem_attrs_for_spill (rtx mem)
2690 rtx addr;
2692 mem_attrs attrs (*get_mem_attrs (mem));
2693 attrs.expr = get_spill_slot_decl (true);
2694 attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr));
2695 attrs.addrspace = ADDR_SPACE_GENERIC;
2697 /* We expect the incoming memory to be of the form:
2698 (mem:MODE (plus (reg sfp) (const_int offset)))
2699 with perhaps the plus missing for offset = 0. */
2700 addr = XEXP (mem, 0);
2701 attrs.offset_known_p = true;
2702 strip_offset (addr, &attrs.offset);
2704 set_mem_attrs (mem, &attrs);
2705 MEM_NOTRAP_P (mem) = 1;
2708 /* Return a newly created CODE_LABEL rtx with a unique label number. */
2710 rtx_code_label *
2711 gen_label_rtx (void)
2713 return as_a <rtx_code_label *> (
2714 gen_rtx_CODE_LABEL (VOIDmode, NULL_RTX, NULL_RTX,
2715 NULL, label_num++, NULL));
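/* Example (illustrative sketch): the usual pattern for a forward branch
   target during expansion:

     rtx_code_label *label = gen_label_rtx ();
     emit_jump (label);
     ...
     emit_label (label);

   Each call to gen_label_rtx hands out a CODE_LABEL with a fresh,
   unique label number.  */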
2718 /* For procedure integration. */
2720 /* Install new pointers to the first and last insns in the chain.
2721 Also, set cur_insn_uid to one higher than the last in use.
2722 Used for an inline-procedure after copying the insn chain. */
2724 void
2725 set_new_first_and_last_insn (rtx_insn *first, rtx_insn *last)
2727 rtx_insn *insn;
2729 set_first_insn (first);
2730 set_last_insn (last);
2731 cur_insn_uid = 0;
2733 if (param_min_nondebug_insn_uid || MAY_HAVE_DEBUG_INSNS)
2735 int debug_count = 0;
2737 cur_insn_uid = param_min_nondebug_insn_uid - 1;
2738 cur_debug_insn_uid = 0;
2740 for (insn = first; insn; insn = NEXT_INSN (insn))
2741 if (INSN_UID (insn) < param_min_nondebug_insn_uid)
2742 cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
2743 else
2745 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2746 if (DEBUG_INSN_P (insn))
2747 debug_count++;
2750 if (debug_count)
2751 cur_debug_insn_uid = param_min_nondebug_insn_uid + debug_count;
2752 else
2753 cur_debug_insn_uid++;
2755 else
2756 for (insn = first; insn; insn = NEXT_INSN (insn))
2757 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2759 cur_insn_uid++;
2762 /* Go through all the RTL insn bodies and copy any invalid shared
2763 structure. This routine should only be called once. */
2765 static void
2766 unshare_all_rtl_1 (rtx_insn *insn)
2768 /* Unshare just about everything else. */
2769 unshare_all_rtl_in_chain (insn);
2771 /* Make sure the addresses of stack slots found outside the insn chain
2772 (such as, in DECL_RTL of a variable) are not shared
2773 with the insn chain.
2775 This special care is necessary when the stack slot MEM does not
2776 actually appear in the insn chain. If it does appear, its address
2777 is unshared from all else at that point. */
2778 unsigned int i;
2779 rtx temp;
2780 FOR_EACH_VEC_SAFE_ELT (stack_slot_list, i, temp)
2781 (*stack_slot_list)[i] = copy_rtx_if_shared (temp);
2784 /* Go through all the RTL insn bodies and copy any invalid shared
2785 structure, again. This is a fairly expensive thing to do so it
2786 should be done sparingly. */
2788 void
2789 unshare_all_rtl_again (rtx_insn *insn)
2791 rtx_insn *p;
2792 tree decl;
2794 for (p = insn; p; p = NEXT_INSN (p))
2795 if (INSN_P (p))
2797 reset_used_flags (PATTERN (p));
2798 reset_used_flags (REG_NOTES (p));
2799 if (CALL_P (p))
2800 reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
2803 /* Make sure that virtual stack slots are not shared. */
2804 set_used_decls (DECL_INITIAL (cfun->decl));
2806 /* Make sure that virtual parameters are not shared. */
2807 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
2808 set_used_flags (DECL_RTL (decl));
2810 rtx temp;
2811 unsigned int i;
2812 FOR_EACH_VEC_SAFE_ELT (stack_slot_list, i, temp)
2813 reset_used_flags (temp);
2815 unshare_all_rtl_1 (insn);
2818 unsigned int
2819 unshare_all_rtl (void)
2821 unshare_all_rtl_1 (get_insns ());
2823 for (tree decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
2825 if (DECL_RTL_SET_P (decl))
2826 SET_DECL_RTL (decl, copy_rtx_if_shared (DECL_RTL (decl)));
2827 DECL_INCOMING_RTL (decl) = copy_rtx_if_shared (DECL_INCOMING_RTL (decl));
2830 return 0;
 2834 /* Check that ORIG is not marked when it should not be, and mark ORIG as in
 2835 use. Recursively does the same for subexpressions. */
2837 static void
2838 verify_rtx_sharing (rtx orig, rtx insn)
2840 rtx x = orig;
2841 int i;
2842 enum rtx_code code;
2843 const char *format_ptr;
2845 if (x == 0)
2846 return;
2848 code = GET_CODE (x);
2850 /* These types may be freely shared. */
2852 switch (code)
2854 case REG:
2855 case DEBUG_EXPR:
2856 case VALUE:
2857 CASE_CONST_ANY:
2858 case SYMBOL_REF:
2859 case LABEL_REF:
2860 case CODE_LABEL:
2861 case PC:
2862 case RETURN:
2863 case SIMPLE_RETURN:
2864 case SCRATCH:
 2865 /* SCRATCHes must be shared because they represent distinct values. */
2866 return;
2867 case CLOBBER:
2868 /* Share clobbers of hard registers, but do not share pseudo reg
2869 clobbers or clobbers of hard registers that originated as pseudos.
2870 This is needed to allow safe register renaming. */
2871 if (REG_P (XEXP (x, 0))
2872 && HARD_REGISTER_NUM_P (REGNO (XEXP (x, 0)))
2873 && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (x, 0))))
2874 return;
2875 break;
2877 case CONST:
2878 if (shared_const_p (orig))
2879 return;
2880 break;
2882 case MEM:
2883 /* A MEM is allowed to be shared if its address is constant. */
2884 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2885 || reload_completed || reload_in_progress)
2886 return;
2888 break;
2890 default:
2891 break;
 2894 /* This rtx may not be shared. If it has already been seen,
 2895 that is a sharing violation, so report it. */
2896 if (flag_checking && RTX_FLAG (x, used))
2898 error ("invalid rtl sharing found in the insn");
2899 debug_rtx (insn);
2900 error ("shared rtx");
2901 debug_rtx (x);
2902 internal_error ("internal consistency failure");
2904 gcc_assert (!RTX_FLAG (x, used));
2906 RTX_FLAG (x, used) = 1;
2908 /* Now scan the subexpressions recursively. */
2910 format_ptr = GET_RTX_FORMAT (code);
2912 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2914 switch (*format_ptr++)
2916 case 'e':
2917 verify_rtx_sharing (XEXP (x, i), insn);
2918 break;
2920 case 'E':
2921 if (XVEC (x, i) != NULL)
2923 int j;
2924 int len = XVECLEN (x, i);
2926 for (j = 0; j < len; j++)
 2928 /* We allow sharing of ASM_OPERANDS inside a single
2929 instruction. */
2930 if (j && GET_CODE (XVECEXP (x, i, j)) == SET
2931 && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2932 == ASM_OPERANDS))
2933 verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2934 else
2935 verify_rtx_sharing (XVECEXP (x, i, j), insn);
2938 break;
2941 return;
2944 /* Reset used-flags for INSN. */
2946 static void
2947 reset_insn_used_flags (rtx insn)
2949 gcc_assert (INSN_P (insn));
2950 reset_used_flags (PATTERN (insn));
2951 reset_used_flags (REG_NOTES (insn));
2952 if (CALL_P (insn))
2953 reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn));
2956 /* Go through all the RTL insn bodies and clear all the USED bits. */
2958 static void
2959 reset_all_used_flags (void)
2961 rtx_insn *p;
2963 for (p = get_insns (); p; p = NEXT_INSN (p))
2964 if (INSN_P (p))
2966 rtx pat = PATTERN (p);
2967 if (GET_CODE (pat) != SEQUENCE)
2968 reset_insn_used_flags (p);
2969 else
2971 gcc_assert (REG_NOTES (p) == NULL);
2972 for (int i = 0; i < XVECLEN (pat, 0); i++)
2974 rtx insn = XVECEXP (pat, 0, i);
2975 if (INSN_P (insn))
2976 reset_insn_used_flags (insn);
2982 /* Verify sharing in INSN. */
2984 static void
2985 verify_insn_sharing (rtx insn)
2987 gcc_assert (INSN_P (insn));
2988 verify_rtx_sharing (PATTERN (insn), insn);
2989 verify_rtx_sharing (REG_NOTES (insn), insn);
2990 if (CALL_P (insn))
2991 verify_rtx_sharing (CALL_INSN_FUNCTION_USAGE (insn), insn);
2994 /* Go through all the RTL insn bodies and check that there is no unexpected
2995 sharing in between the subexpressions. */
2997 DEBUG_FUNCTION void
2998 verify_rtl_sharing (void)
3000 rtx_insn *p;
3002 timevar_push (TV_VERIFY_RTL_SHARING);
3004 reset_all_used_flags ();
3006 for (p = get_insns (); p; p = NEXT_INSN (p))
3007 if (INSN_P (p))
3009 rtx pat = PATTERN (p);
3010 if (GET_CODE (pat) != SEQUENCE)
3011 verify_insn_sharing (p);
3012 else
3013 for (int i = 0; i < XVECLEN (pat, 0); i++)
3015 rtx insn = XVECEXP (pat, 0, i);
3016 if (INSN_P (insn))
3017 verify_insn_sharing (insn);
3021 reset_all_used_flags ();
3023 timevar_pop (TV_VERIFY_RTL_SHARING);
3026 /* Go through all the RTL insn bodies and copy any invalid shared structure.
3027 Assumes the mark bits are cleared at entry. */
3029 void
3030 unshare_all_rtl_in_chain (rtx_insn *insn)
3032 for (; insn; insn = NEXT_INSN (insn))
3033 if (INSN_P (insn))
3035 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
3036 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
3037 if (CALL_P (insn))
3038 CALL_INSN_FUNCTION_USAGE (insn)
3039 = copy_rtx_if_shared (CALL_INSN_FUNCTION_USAGE (insn));
3043 /* Go through all virtual stack slots of a function and mark them as
3044 shared. We never replace the DECL_RTLs themselves with a copy,
 3045 but expressions mentioned in a DECL_RTL cannot be shared with
3046 expressions in the instruction stream.
3048 Note that reload may convert pseudo registers into memories in-place.
3049 Pseudo registers are always shared, but MEMs never are. Thus if we
3050 reset the used flags on MEMs in the instruction stream, we must set
3051 them again on MEMs that appear in DECL_RTLs. */
3053 static void
3054 set_used_decls (tree blk)
3056 tree t;
3058 /* Mark decls. */
3059 for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
3060 if (DECL_RTL_SET_P (t))
3061 set_used_flags (DECL_RTL (t));
3063 /* Now process sub-blocks. */
3064 for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
3065 set_used_decls (t);
3068 /* Mark ORIG as in use, and return a copy of it if it was already in use.
3069 Recursively does the same for subexpressions. Uses
3070 copy_rtx_if_shared_1 to reduce stack space. */
3073 copy_rtx_if_shared (rtx orig)
3075 copy_rtx_if_shared_1 (&orig);
3076 return orig;
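/* Example (illustrative sketch): the intended protocol is to clear the
   used bits on everything of interest and then walk it again, copying
   whatever turns out to be reached twice:

     reset_used_flags (PATTERN (insn));
     reset_used_flags (REG_NOTES (insn));
     PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
     REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));

   This is essentially what unshare_all_rtl_again arranges for the whole
   insn chain.  */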
3079 /* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
3080 use. Recursively does the same for subexpressions. */
3082 static void
3083 copy_rtx_if_shared_1 (rtx *orig1)
3085 rtx x;
3086 int i;
3087 enum rtx_code code;
3088 rtx *last_ptr;
3089 const char *format_ptr;
3090 int copied = 0;
3091 int length;
3093 /* Repeat is used to turn tail-recursion into iteration. */
3094 repeat:
3095 x = *orig1;
3097 if (x == 0)
3098 return;
3100 code = GET_CODE (x);
3102 /* These types may be freely shared. */
3104 switch (code)
3106 case REG:
3107 case DEBUG_EXPR:
3108 case VALUE:
3109 CASE_CONST_ANY:
3110 case SYMBOL_REF:
3111 case LABEL_REF:
3112 case CODE_LABEL:
3113 case PC:
3114 case RETURN:
3115 case SIMPLE_RETURN:
3116 case SCRATCH:
 3117 /* SCRATCHes must be shared because they represent distinct values. */
3118 return;
3119 case CLOBBER:
3120 /* Share clobbers of hard registers, but do not share pseudo reg
3121 clobbers or clobbers of hard registers that originated as pseudos.
3122 This is needed to allow safe register renaming. */
3123 if (REG_P (XEXP (x, 0))
3124 && HARD_REGISTER_NUM_P (REGNO (XEXP (x, 0)))
3125 && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (x, 0))))
3126 return;
3127 break;
3129 case CONST:
3130 if (shared_const_p (x))
3131 return;
3132 break;
3134 case DEBUG_INSN:
3135 case INSN:
3136 case JUMP_INSN:
3137 case CALL_INSN:
3138 case NOTE:
3139 case BARRIER:
3140 /* The chain of insns is not being copied. */
3141 return;
3143 default:
3144 break;
3147 /* This rtx may not be shared. If it has already been seen,
3148 replace it with a copy of itself. */
3150 if (RTX_FLAG (x, used))
3152 x = shallow_copy_rtx (x);
3153 copied = 1;
3155 RTX_FLAG (x, used) = 1;
3157 /* Now scan the subexpressions recursively.
3158 We can store any replaced subexpressions directly into X
3159 since we know X is not shared! Any vectors in X
3160 must be copied if X was copied. */
3162 format_ptr = GET_RTX_FORMAT (code);
3163 length = GET_RTX_LENGTH (code);
3164 last_ptr = NULL;
3166 for (i = 0; i < length; i++)
3168 switch (*format_ptr++)
3170 case 'e':
3171 if (last_ptr)
3172 copy_rtx_if_shared_1 (last_ptr);
3173 last_ptr = &XEXP (x, i);
3174 break;
3176 case 'E':
3177 if (XVEC (x, i) != NULL)
3179 int j;
3180 int len = XVECLEN (x, i);
3182 /* Copy the vector iff I copied the rtx and the length
3183 is nonzero. */
3184 if (copied && len > 0)
3185 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
3187 /* Call recursively on all inside the vector. */
3188 for (j = 0; j < len; j++)
3190 if (last_ptr)
3191 copy_rtx_if_shared_1 (last_ptr);
3192 last_ptr = &XVECEXP (x, i, j);
3195 break;
3198 *orig1 = x;
3199 if (last_ptr)
3201 orig1 = last_ptr;
3202 goto repeat;
3204 return;
3207 /* Set the USED bit in X and its non-shareable subparts to FLAG. */
3209 static void
3210 mark_used_flags (rtx x, int flag)
3212 int i, j;
3213 enum rtx_code code;
3214 const char *format_ptr;
3215 int length;
3217 /* Repeat is used to turn tail-recursion into iteration. */
3218 repeat:
3219 if (x == 0)
3220 return;
3222 code = GET_CODE (x);
3224 /* These types may be freely shared so we needn't do any resetting
3225 for them. */
3227 switch (code)
3229 case REG:
3230 case DEBUG_EXPR:
3231 case VALUE:
3232 CASE_CONST_ANY:
3233 case SYMBOL_REF:
3234 case CODE_LABEL:
3235 case PC:
3236 case RETURN:
3237 case SIMPLE_RETURN:
3238 return;
3240 case DEBUG_INSN:
3241 case INSN:
3242 case JUMP_INSN:
3243 case CALL_INSN:
3244 case NOTE:
3245 case LABEL_REF:
3246 case BARRIER:
3247 /* The chain of insns is not being copied. */
3248 return;
3250 default:
3251 break;
3254 RTX_FLAG (x, used) = flag;
3256 format_ptr = GET_RTX_FORMAT (code);
3257 length = GET_RTX_LENGTH (code);
3259 for (i = 0; i < length; i++)
3261 switch (*format_ptr++)
3263 case 'e':
3264 if (i == length-1)
3266 x = XEXP (x, i);
3267 goto repeat;
3269 mark_used_flags (XEXP (x, i), flag);
3270 break;
3272 case 'E':
3273 for (j = 0; j < XVECLEN (x, i); j++)
3274 mark_used_flags (XVECEXP (x, i, j), flag);
3275 break;
3280 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
3281 to look for shared sub-parts. */
3283 void
3284 reset_used_flags (rtx x)
3286 mark_used_flags (x, 0);
3289 /* Set all the USED bits in X to allow copy_rtx_if_shared to be used
3290 to look for shared sub-parts. */
3292 void
3293 set_used_flags (rtx x)
3295 mark_used_flags (x, 1);
3298 /* Copy X if necessary so that it won't be altered by changes in OTHER.
3299 Return X or the rtx for the pseudo reg the value of X was copied into.
3300 OTHER must be valid as a SET_DEST. */
3303 make_safe_from (rtx x, rtx other)
3305 while (1)
3306 switch (GET_CODE (other))
3308 case SUBREG:
3309 other = SUBREG_REG (other);
3310 break;
3311 case STRICT_LOW_PART:
3312 case SIGN_EXTEND:
3313 case ZERO_EXTEND:
3314 other = XEXP (other, 0);
3315 break;
3316 default:
3317 goto done;
3319 done:
3320 if ((MEM_P (other)
3321 && ! CONSTANT_P (x)
3322 && !REG_P (x)
3323 && GET_CODE (x) != SUBREG)
3324 || (REG_P (other)
3325 && (REGNO (other) < FIRST_PSEUDO_REGISTER
3326 || reg_mentioned_p (other, x))))
3328 rtx temp = gen_reg_rtx (GET_MODE (x));
3329 emit_move_insn (temp, x);
3330 return temp;
3332 return x;
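/* Example (illustrative sketch): when expanding a statement of the form
   A = B op C, where storing into A could alter B, an expander can do

     b = make_safe_from (b, a);

   before emitting the store; if A is a MEM (and B is not a simple
   register, subreg or constant), or A is a hard register or a register
   that B mentions, then B is first copied into a fresh pseudo so the
   store cannot change it.  */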
3335 /* Emission of insns (adding them to the doubly-linked list). */
3337 /* Return the last insn emitted, even if it is in a sequence now pushed. */
3339 rtx_insn *
3340 get_last_insn_anywhere (void)
3342 struct sequence_stack *seq;
3343 for (seq = get_current_sequence (); seq; seq = seq->next)
3344 if (seq->last != 0)
3345 return seq->last;
3346 return 0;
 3349 /* Return the first nonnote insn emitted in the current sequence or the
 3350 current function. This routine looks inside SEQUENCEs. */
3352 rtx_insn *
3353 get_first_nonnote_insn (void)
3355 rtx_insn *insn = get_insns ();
3357 if (insn)
3359 if (NOTE_P (insn))
3360 for (insn = next_insn (insn);
3361 insn && NOTE_P (insn);
3362 insn = next_insn (insn))
3363 continue;
3364 else
3366 if (NONJUMP_INSN_P (insn)
3367 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3368 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3372 return insn;
 3375 /* Return the last nonnote insn emitted in the current sequence or the
 3376 current function. This routine looks inside SEQUENCEs. */
3378 rtx_insn *
3379 get_last_nonnote_insn (void)
3381 rtx_insn *insn = get_last_insn ();
3383 if (insn)
3385 if (NOTE_P (insn))
3386 for (insn = previous_insn (insn);
3387 insn && NOTE_P (insn);
3388 insn = previous_insn (insn))
3389 continue;
3390 else
3392 if (NONJUMP_INSN_P (insn))
3393 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
3394 insn = seq->insn (seq->len () - 1);
3398 return insn;
3401 /* Return the number of actual (non-debug) insns emitted in this
3402 function. */
3405 get_max_insn_count (void)
3407 int n = cur_insn_uid;
3409 /* The table size must be stable across -g, to avoid codegen
3410 differences due to debug insns, and not be affected by
3411 -fmin-insn-uid, to avoid excessive table size and to simplify
3412 debugging of -fcompare-debug failures. */
3413 if (cur_debug_insn_uid > param_min_nondebug_insn_uid)
3414 n -= cur_debug_insn_uid;
3415 else
3416 n -= param_min_nondebug_insn_uid;
3418 return n;
3422 /* Return the next insn. If it is a SEQUENCE, return the first insn
3423 of the sequence. */
3425 rtx_insn *
3426 next_insn (rtx_insn *insn)
3428 if (insn)
3430 insn = NEXT_INSN (insn);
3431 if (insn && NONJUMP_INSN_P (insn)
3432 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3433 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3436 return insn;
3439 /* Return the previous insn. If it is a SEQUENCE, return the last insn
3440 of the sequence. */
3442 rtx_insn *
3443 previous_insn (rtx_insn *insn)
3445 if (insn)
3447 insn = PREV_INSN (insn);
3448 if (insn && NONJUMP_INSN_P (insn))
3449 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
3450 insn = seq->insn (seq->len () - 1);
3453 return insn;
3456 /* Return the next insn after INSN that is not a NOTE. This routine does not
3457 look inside SEQUENCEs. */
3459 rtx_insn *
3460 next_nonnote_insn (rtx_insn *insn)
3462 while (insn)
3464 insn = NEXT_INSN (insn);
3465 if (insn == 0 || !NOTE_P (insn))
3466 break;
3469 return insn;
3472 /* Return the next insn after INSN that is not a DEBUG_INSN. This
3473 routine does not look inside SEQUENCEs. */
3475 rtx_insn *
3476 next_nondebug_insn (rtx_insn *insn)
3478 while (insn)
3480 insn = NEXT_INSN (insn);
3481 if (insn == 0 || !DEBUG_INSN_P (insn))
3482 break;
3485 return insn;
3488 /* Return the previous insn before INSN that is not a NOTE. This routine does
3489 not look inside SEQUENCEs. */
3491 rtx_insn *
3492 prev_nonnote_insn (rtx_insn *insn)
3494 while (insn)
3496 insn = PREV_INSN (insn);
3497 if (insn == 0 || !NOTE_P (insn))
3498 break;
3501 return insn;
3504 /* Return the previous insn before INSN that is not a DEBUG_INSN.
3505 This routine does not look inside SEQUENCEs. */
3507 rtx_insn *
3508 prev_nondebug_insn (rtx_insn *insn)
3510 while (insn)
3512 insn = PREV_INSN (insn);
3513 if (insn == 0 || !DEBUG_INSN_P (insn))
3514 break;
3517 return insn;
3520 /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
3521 This routine does not look inside SEQUENCEs. */
3523 rtx_insn *
3524 next_nonnote_nondebug_insn (rtx_insn *insn)
3526 while (insn)
3528 insn = NEXT_INSN (insn);
3529 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3530 break;
3533 return insn;
3536 /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN,
3537 but stop the search before we enter another basic block. This
3538 routine does not look inside SEQUENCEs. */
3540 rtx_insn *
3541 next_nonnote_nondebug_insn_bb (rtx_insn *insn)
3543 while (insn)
3545 insn = NEXT_INSN (insn);
3546 if (insn == 0)
3547 break;
3548 if (DEBUG_INSN_P (insn))
3549 continue;
3550 if (!NOTE_P (insn))
3551 break;
3552 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3553 return NULL;
3556 return insn;
3559 /* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
3560 This routine does not look inside SEQUENCEs. */
3562 rtx_insn *
3563 prev_nonnote_nondebug_insn (rtx_insn *insn)
3565 while (insn)
3567 insn = PREV_INSN (insn);
3568 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3569 break;
3572 return insn;
3575 /* Return the previous insn before INSN that is not a NOTE nor
3576 DEBUG_INSN, but stop the search before we enter another basic
3577 block. This routine does not look inside SEQUENCEs. */
3579 rtx_insn *
3580 prev_nonnote_nondebug_insn_bb (rtx_insn *insn)
3582 while (insn)
3584 insn = PREV_INSN (insn);
3585 if (insn == 0)
3586 break;
3587 if (DEBUG_INSN_P (insn))
3588 continue;
3589 if (!NOTE_P (insn))
3590 break;
3591 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3592 return NULL;
3595 return insn;
3598 /* Return the next INSN, CALL_INSN, JUMP_INSN or DEBUG_INSN after INSN;
3599 or 0, if there is none. This routine does not look inside
3600 SEQUENCEs. */
3602 rtx_insn *
3603 next_real_insn (rtx_insn *insn)
3605 while (insn)
3607 insn = NEXT_INSN (insn);
3608 if (insn == 0 || INSN_P (insn))
3609 break;
3612 return insn;
3615 /* Return the last INSN, CALL_INSN, JUMP_INSN or DEBUG_INSN before INSN;
3616 or 0, if there is none. This routine does not look inside
3617 SEQUENCEs. */
3619 rtx_insn *
3620 prev_real_insn (rtx_insn *insn)
3622 while (insn)
3624 insn = PREV_INSN (insn);
3625 if (insn == 0 || INSN_P (insn))
3626 break;
3629 return insn;
3632 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3633 or 0, if there is none. This routine does not look inside
3634 SEQUENCEs. */
3636 rtx_insn *
3637 next_real_nondebug_insn (rtx uncast_insn)
3639 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3641 while (insn)
3643 insn = NEXT_INSN (insn);
3644 if (insn == 0 || NONDEBUG_INSN_P (insn))
3645 break;
3648 return insn;
3651 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3652 or 0, if there is none. This routine does not look inside
3653 SEQUENCEs. */
3655 rtx_insn *
3656 prev_real_nondebug_insn (rtx_insn *insn)
3658 while (insn)
3660 insn = PREV_INSN (insn);
3661 if (insn == 0 || NONDEBUG_INSN_P (insn))
3662 break;
3665 return insn;
3668 /* Return the last CALL_INSN in the current list, or 0 if there is none.
3669 This routine does not look inside SEQUENCEs. */
3671 rtx_call_insn *
3672 last_call_insn (void)
3674 rtx_insn *insn;
3676 for (insn = get_last_insn ();
3677 insn && !CALL_P (insn);
3678 insn = PREV_INSN (insn))
3681 return safe_as_a <rtx_call_insn *> (insn);
3684 /* Find the next insn after INSN that really does something. This routine
3685 does not look inside SEQUENCEs. After reload this also skips over
 3686 standalone USE and CLOBBER insns. */
3689 active_insn_p (const rtx_insn *insn)
3691 return (CALL_P (insn) || JUMP_P (insn)
3692 || JUMP_TABLE_DATA_P (insn) /* FIXME */
3693 || (NONJUMP_INSN_P (insn)
3694 && (! reload_completed
3695 || (GET_CODE (PATTERN (insn)) != USE
3696 && GET_CODE (PATTERN (insn)) != CLOBBER))));
3699 rtx_insn *
3700 next_active_insn (rtx_insn *insn)
3702 while (insn)
3704 insn = NEXT_INSN (insn);
3705 if (insn == 0 || active_insn_p (insn))
3706 break;
3709 return insn;
3712 /* Find the last insn before INSN that really does something. This routine
3713 does not look inside SEQUENCEs. After reload this also skips over
 3714 standalone USE and CLOBBER insns. */
3716 rtx_insn *
3717 prev_active_insn (rtx_insn *insn)
3719 while (insn)
3721 insn = PREV_INSN (insn);
3722 if (insn == 0 || active_insn_p (insn))
3723 break;
3726 return insn;
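/* Example (illustrative sketch; "process" is a placeholder): a pass
   scanning only the real insns of the current function, skipping notes
   and debug insns:

     for (rtx_insn *insn = get_insns (); insn;
          insn = next_nonnote_nondebug_insn (insn))
       if (INSN_P (insn))
         process (insn);

   The _bb variants above stop at basic block boundaries, and the active
   variants additionally skip USE and CLOBBER insns after reload.  */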
 3731 /* Return true if X contains an RTX_AUTOINC class rtx whose operand is REG. */
3731 static int
3732 find_auto_inc (const_rtx x, const_rtx reg)
3734 subrtx_iterator::array_type array;
3735 FOR_EACH_SUBRTX (iter, array, x, NONCONST)
3737 const_rtx x = *iter;
3738 if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC
3739 && rtx_equal_p (reg, XEXP (x, 0)))
3740 return true;
3742 return false;
 3747 /* Increment the label uses for all labels present in X. */
3747 static void
3748 mark_label_nuses (rtx x)
3750 enum rtx_code code;
3751 int i, j;
3752 const char *fmt;
3754 code = GET_CODE (x);
3755 if (code == LABEL_REF && LABEL_P (label_ref_label (x)))
3756 LABEL_NUSES (label_ref_label (x))++;
3758 fmt = GET_RTX_FORMAT (code);
3759 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3761 if (fmt[i] == 'e')
3762 mark_label_nuses (XEXP (x, i));
3763 else if (fmt[i] == 'E')
3764 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3765 mark_label_nuses (XVECEXP (x, i, j));
3770 /* Try splitting insns that can be split for better scheduling.
 3771 PAT is the pattern which might be split.
3772 TRIAL is the insn providing PAT.
3773 LAST is nonzero if we should return the last insn of the sequence produced.
3775 If this routine succeeds in splitting, it returns the first or last
3776 replacement insn depending on the value of LAST. Otherwise, it
3777 returns TRIAL. If the insn to be returned can be split, it will be. */
3779 rtx_insn *
3780 try_split (rtx pat, rtx_insn *trial, int last)
3782 rtx_insn *before, *after;
3783 rtx note;
3784 rtx_insn *seq, *tem;
3785 profile_probability probability;
3786 rtx_insn *insn_last, *insn;
3787 int njumps = 0;
3788 rtx_insn *call_insn = NULL;
3790 if (any_condjump_p (trial)
3791 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3792 split_branch_probability
3793 = profile_probability::from_reg_br_prob_note (XINT (note, 0));
3794 else
3795 split_branch_probability = profile_probability::uninitialized ();
3797 probability = split_branch_probability;
3799 seq = split_insns (pat, trial);
3801 split_branch_probability = profile_probability::uninitialized ();
3803 if (!seq)
3804 return trial;
3806 int split_insn_count = 0;
3807 /* Avoid infinite loop if any insn of the result matches
3808 the original pattern. */
3809 insn_last = seq;
3810 while (1)
3812 if (INSN_P (insn_last)
3813 && rtx_equal_p (PATTERN (insn_last), pat))
3814 return trial;
3815 split_insn_count++;
3816 if (!NEXT_INSN (insn_last))
3817 break;
3818 insn_last = NEXT_INSN (insn_last);
3821 /* We're not good at redistributing frame information if
3822 the split occurs before reload or if it results in more
3823 than one insn. */
3824 if (RTX_FRAME_RELATED_P (trial))
3826 if (!reload_completed || split_insn_count != 1)
3827 return trial;
3829 rtx_insn *new_insn = seq;
3830 rtx_insn *old_insn = trial;
3831 copy_frame_info_to_split_insn (old_insn, new_insn);
3834 /* We will be adding the new sequence to the function. The splitters
3835 may have introduced invalid RTL sharing, so unshare the sequence now. */
3836 unshare_all_rtl_in_chain (seq);
3838 /* Mark labels and copy flags. */
3839 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3841 if (JUMP_P (insn))
3843 if (JUMP_P (trial))
3844 CROSSING_JUMP_P (insn) = CROSSING_JUMP_P (trial);
3845 mark_jump_label (PATTERN (insn), insn, 0);
3846 njumps++;
3847 if (probability.initialized_p ()
3848 && any_condjump_p (insn)
3849 && !find_reg_note (insn, REG_BR_PROB, 0))
3851 /* We can preserve the REG_BR_PROB notes only if exactly
3852 one jump is created, otherwise the machine description
 3853 is responsible for this step using the
 3854 split_branch_probability variable. */
3855 gcc_assert (njumps == 1);
3856 add_reg_br_prob_note (insn, probability);
3861 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3862 in SEQ and copy any additional information across. */
3863 if (CALL_P (trial))
3865 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3866 if (CALL_P (insn))
3868 gcc_assert (call_insn == NULL_RTX);
3869 call_insn = insn;
3871 /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the
3872 target may have explicitly specified. */
3873 rtx *p = &CALL_INSN_FUNCTION_USAGE (insn);
3874 while (*p)
3875 p = &XEXP (*p, 1);
3876 *p = CALL_INSN_FUNCTION_USAGE (trial);
3878 /* If the old call was a sibling call, the new one must
3879 be too. */
3880 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
3884 /* Copy notes, particularly those related to the CFG. */
3885 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3887 switch (REG_NOTE_KIND (note))
3889 case REG_EH_REGION:
3890 copy_reg_eh_region_note_backward (note, insn_last, NULL);
3891 break;
3893 case REG_NORETURN:
3894 case REG_SETJMP:
3895 case REG_TM:
3896 case REG_CALL_NOCF_CHECK:
3897 case REG_CALL_ARG_LOCATION:
3898 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3900 if (CALL_P (insn))
3901 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3903 break;
3905 case REG_NON_LOCAL_GOTO:
3906 case REG_LABEL_TARGET:
3907 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3909 if (JUMP_P (insn))
3910 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3912 break;
3914 case REG_INC:
3915 if (!AUTO_INC_DEC)
3916 break;
3918 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3920 rtx reg = XEXP (note, 0);
3921 if (!FIND_REG_INC_NOTE (insn, reg)
3922 && find_auto_inc (PATTERN (insn), reg))
3923 add_reg_note (insn, REG_INC, reg);
3925 break;
3927 case REG_ARGS_SIZE:
3928 fixup_args_size_notes (NULL, insn_last, get_args_size (note));
3929 break;
3931 case REG_CALL_DECL:
3932 case REG_UNTYPED_CALL:
3933 gcc_assert (call_insn != NULL_RTX);
3934 add_reg_note (call_insn, REG_NOTE_KIND (note), XEXP (note, 0));
3935 break;
3937 default:
3938 break;
 3942 /* If there are LABELS inside the split insns, increment the
 3943 usage count so we don't delete the labels. */
3944 if (INSN_P (trial))
3946 insn = insn_last;
3947 while (insn != NULL_RTX)
3949 /* JUMP_P insns have already been "marked" above. */
3950 if (NONJUMP_INSN_P (insn))
3951 mark_label_nuses (PATTERN (insn));
3953 insn = PREV_INSN (insn);
3957 before = PREV_INSN (trial);
3958 after = NEXT_INSN (trial);
3960 emit_insn_after_setloc (seq, trial, INSN_LOCATION (trial));
3962 delete_insn (trial);
3964 /* Recursively call try_split for each new insn created; by the
3965 time control returns here that insn will be fully split, so
3966 set LAST and continue from the insn after the one returned.
3967 We can't use next_active_insn here since AFTER may be a note.
 3968 Ignore deleted insns, which can occur when not optimizing. */
3969 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3970 if (! tem->deleted () && INSN_P (tem))
3971 tem = try_split (PATTERN (tem), tem, 1);
3973 /* Return either the first or the last insn, depending on which was
3974 requested. */
3975 return last
3976 ? (after ? PREV_INSN (after) : get_last_insn ())
3977 : NEXT_INSN (before);
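/* Example (illustrative sketch): the split passes call this on each
   candidate insn and continue from whatever is returned:

     insn = try_split (PATTERN (insn), insn, 1);

   If the machine description provides a matching define_split, INSN is
   replaced by the new sequence and the last new insn is returned;
   otherwise INSN itself is returned unchanged.  */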
3980 /* Make and return an INSN rtx, initializing all its slots.
 3981 Store PATTERN in the pattern slot. */
3983 rtx_insn *
3984 make_insn_raw (rtx pattern)
3986 rtx_insn *insn;
3988 insn = as_a <rtx_insn *> (rtx_alloc (INSN));
3990 INSN_UID (insn) = cur_insn_uid++;
3991 PATTERN (insn) = pattern;
3992 INSN_CODE (insn) = -1;
3993 REG_NOTES (insn) = NULL;
3994 INSN_LOCATION (insn) = curr_insn_location ();
3995 BLOCK_FOR_INSN (insn) = NULL;
3997 #ifdef ENABLE_RTL_CHECKING
3998 if (insn
3999 && INSN_P (insn)
4000 && (returnjump_p (insn)
4001 || (GET_CODE (insn) == SET
4002 && SET_DEST (insn) == pc_rtx)))
4004 warning (0, "ICE: %<emit_insn%> used where %<emit_jump_insn%> needed:");
4005 debug_rtx (insn);
4007 #endif
4009 return insn;
4012 /* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn. */
4014 static rtx_insn *
4015 make_debug_insn_raw (rtx pattern)
4017 rtx_debug_insn *insn;
4019 insn = as_a <rtx_debug_insn *> (rtx_alloc (DEBUG_INSN));
4020 INSN_UID (insn) = cur_debug_insn_uid++;
4021 if (cur_debug_insn_uid > param_min_nondebug_insn_uid)
4022 INSN_UID (insn) = cur_insn_uid++;
4024 PATTERN (insn) = pattern;
4025 INSN_CODE (insn) = -1;
4026 REG_NOTES (insn) = NULL;
4027 INSN_LOCATION (insn) = curr_insn_location ();
4028 BLOCK_FOR_INSN (insn) = NULL;
4030 return insn;
4033 /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
4035 static rtx_insn *
4036 make_jump_insn_raw (rtx pattern)
4038 rtx_jump_insn *insn;
4040 insn = as_a <rtx_jump_insn *> (rtx_alloc (JUMP_INSN));
4041 INSN_UID (insn) = cur_insn_uid++;
4043 PATTERN (insn) = pattern;
4044 INSN_CODE (insn) = -1;
4045 REG_NOTES (insn) = NULL;
4046 JUMP_LABEL (insn) = NULL;
4047 INSN_LOCATION (insn) = curr_insn_location ();
4048 BLOCK_FOR_INSN (insn) = NULL;
4050 return insn;
4053 /* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
4055 static rtx_insn *
4056 make_call_insn_raw (rtx pattern)
4058 rtx_call_insn *insn;
4060 insn = as_a <rtx_call_insn *> (rtx_alloc (CALL_INSN));
4061 INSN_UID (insn) = cur_insn_uid++;
4063 PATTERN (insn) = pattern;
4064 INSN_CODE (insn) = -1;
4065 REG_NOTES (insn) = NULL;
4066 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
4067 INSN_LOCATION (insn) = curr_insn_location ();
4068 BLOCK_FOR_INSN (insn) = NULL;
4070 return insn;
4073 /* Like `make_insn_raw' but make a NOTE instead of an insn. */
4075 static rtx_note *
4076 make_note_raw (enum insn_note subtype)
4078 /* Some notes are never created this way at all. These notes are
4079 only created by patching out insns. */
4080 gcc_assert (subtype != NOTE_INSN_DELETED_LABEL
4081 && subtype != NOTE_INSN_DELETED_DEBUG_LABEL);
4083 rtx_note *note = as_a <rtx_note *> (rtx_alloc (NOTE));
4084 INSN_UID (note) = cur_insn_uid++;
4085 NOTE_KIND (note) = subtype;
4086 BLOCK_FOR_INSN (note) = NULL;
4087 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
4088 return note;
4091 /* Add INSN into the doubly-linked list, between PREV and NEXT.
4092 INSN may be any object that can appear in the chain: INSN_P and NOTE_P objects,
4093 but also BARRIERs and JUMP_TABLE_DATAs. PREV and NEXT may be NULL. */
4095 static inline void
4096 link_insn_into_chain (rtx_insn *insn, rtx_insn *prev, rtx_insn *next)
4098 SET_PREV_INSN (insn) = prev;
4099 SET_NEXT_INSN (insn) = next;
4100 if (prev != NULL)
4102 SET_NEXT_INSN (prev) = insn;
4103 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
4105 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
4106 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = insn;
4109 if (next != NULL)
4111 SET_PREV_INSN (next) = insn;
4112 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
4114 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
4115 SET_PREV_INSN (sequence->insn (0)) = insn;
4119 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
4121 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (insn));
4122 SET_PREV_INSN (sequence->insn (0)) = prev;
4123 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
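/* For example (illustrative only), linking I between non-SEQUENCE insns
   P and N establishes:

	NEXT_INSN (P) == I	PREV_INSN (I) == P
	PREV_INSN (N) == I	NEXT_INSN (I) == N

   If P's pattern is a SEQUENCE, the last insn inside that SEQUENCE also
   gets its NEXT_INSN set to I, so chain walks that descend into the
   SEQUENCE body still reach I afterwards; the symmetric update is made
   to the first insn of a SEQUENCE in N. */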
4127 /* Add INSN to the end of the doubly-linked list.
4128 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
4130 void
4131 add_insn (rtx_insn *insn)
4133 rtx_insn *prev = get_last_insn ();
4134 link_insn_into_chain (insn, prev, NULL);
4135 if (get_insns () == NULL)
4136 set_first_insn (insn);
4137 set_last_insn (insn);
4140 /* Add INSN into the doubly-linked list after insn AFTER. */
4142 static void
4143 add_insn_after_nobb (rtx_insn *insn, rtx_insn *after)
4145 rtx_insn *next = NEXT_INSN (after);
4147 gcc_assert (!optimize || !after->deleted ());
4149 link_insn_into_chain (insn, after, next);
4151 if (next == NULL)
4153 struct sequence_stack *seq;
4155 for (seq = get_current_sequence (); seq; seq = seq->next)
4156 if (after == seq->last)
4158 seq->last = insn;
4159 break;
4164 /* Add INSN into the doubly-linked list before insn BEFORE. */
4166 static void
4167 add_insn_before_nobb (rtx_insn *insn, rtx_insn *before)
4169 rtx_insn *prev = PREV_INSN (before);
4171 gcc_assert (!optimize || !before->deleted ());
4173 link_insn_into_chain (insn, prev, before);
4175 if (prev == NULL)
4177 struct sequence_stack *seq;
4179 for (seq = get_current_sequence (); seq; seq = seq->next)
4180 if (before == seq->first)
4182 seq->first = insn;
4183 break;
4186 gcc_assert (seq);
4190 /* Like add_insn_after_nobb, but try to set BLOCK_FOR_INSN.
4191 If BB is NULL, an attempt is made to infer the bb from AFTER.
4193 This and the next function should be the only functions called
4194 to insert an insn once delay slots have been filled since only
4195 they know how to update a SEQUENCE. */
4197 void
4198 add_insn_after (rtx_insn *insn, rtx_insn *after, basic_block bb)
4200 add_insn_after_nobb (insn, after);
4201 if (!BARRIER_P (after)
4202 && !BARRIER_P (insn)
4203 && (bb = BLOCK_FOR_INSN (after)))
4205 set_block_for_insn (insn, bb);
4206 if (INSN_P (insn))
4207 df_insn_rescan (insn);
4208 /* Should not happen, as the first insn in the BB is always
4209 either a NOTE or a LABEL. */
4210 if (BB_END (bb) == after
4211 /* Avoid clobbering of structure when creating new BB. */
4212 && !BARRIER_P (insn)
4213 && !NOTE_INSN_BASIC_BLOCK_P (insn))
4214 BB_END (bb) = insn;
4218 /* Like add_insn_before_nobb, but try to set BLOCK_FOR_INSN.
4219 If BB is NULL, an attempt is made to infer the bb from BEFORE.
4221 This and the previous function should be the only functions called
4222 to insert an insn once delay slots have been filled since only
4223 they know how to update a SEQUENCE. */
4225 void
4226 add_insn_before (rtx_insn *insn, rtx_insn *before, basic_block bb)
4228 add_insn_before_nobb (insn, before);
4230 if (!bb
4231 && !BARRIER_P (before)
4232 && !BARRIER_P (insn))
4233 bb = BLOCK_FOR_INSN (before);
4235 if (bb)
4237 set_block_for_insn (insn, bb);
4238 if (INSN_P (insn))
4239 df_insn_rescan (insn);
4240 /* Should not happen, as the first insn in the BB is always either a
4241 NOTE or a LABEL. */
4242 gcc_assert (BB_HEAD (bb) != insn
4243 /* Avoid clobbering of structure when creating new BB. */
4244 || BARRIER_P (insn)
4245 || NOTE_INSN_BASIC_BLOCK_P (insn));
4249 /* Replace INSN with a deleted instruction note. */
4251 void
4252 set_insn_deleted (rtx_insn *insn)
4254 if (INSN_P (insn))
4255 df_insn_delete (insn);
4256 PUT_CODE (insn, NOTE);
4257 NOTE_KIND (insn) = NOTE_INSN_DELETED;
4261 /* Unlink INSN from the insn chain.
4263 This function knows how to handle sequences.
4265 This function does not invalidate data flow information associated with
4266 INSN (i.e. does not call df_insn_delete). That makes this function
4267 usable for only disconnecting an insn from the chain, and re-emit it
4268 elsewhere later.
4270 To later insert INSN elsewhere in the insn chain via add_insn and
4271 similar functions, PREV_INSN and NEXT_INSN must be nullified by
4272 the caller. Nullifying them here breaks many insn chain walks.
4274 To really delete an insn and related DF information, use delete_insn. */
4276 void
4277 remove_insn (rtx_insn *insn)
4279 rtx_insn *next = NEXT_INSN (insn);
4280 rtx_insn *prev = PREV_INSN (insn);
4281 basic_block bb;
4283 if (prev)
4285 SET_NEXT_INSN (prev) = next;
4286 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
4288 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
4289 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
4292 else
4294 struct sequence_stack *seq;
4296 for (seq = get_current_sequence (); seq; seq = seq->next)
4297 if (insn == seq->first)
4299 seq->first = next;
4300 break;
4303 gcc_assert (seq);
4306 if (next)
4308 SET_PREV_INSN (next) = prev;
4309 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
4311 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
4312 SET_PREV_INSN (sequence->insn (0)) = prev;
4315 else
4317 struct sequence_stack *seq;
4319 for (seq = get_current_sequence (); seq; seq = seq->next)
4320 if (insn == seq->last)
4322 seq->last = prev;
4323 break;
4326 gcc_assert (seq);
4329 /* Fix up basic block boundaries, if necessary. */
4330 if (!BARRIER_P (insn)
4331 && (bb = BLOCK_FOR_INSN (insn)))
4333 if (BB_HEAD (bb) == insn)
4335 /* Never ever delete the basic block note without deleting whole
4336 basic block. */
4337 gcc_assert (!NOTE_P (insn));
4338 BB_HEAD (bb) = next;
4340 if (BB_END (bb) == insn)
4341 BB_END (bb) = prev;
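/* A minimal sketch of the disconnect-and-re-emit idiom described above
   (illustrative only; NEW_PLACE is a placeholder insn chosen by the
   caller):

	remove_insn (insn);
	SET_PREV_INSN (insn) = NULL;
	SET_NEXT_INSN (insn) = NULL;
	add_insn_after (insn, new_place, NULL);

   Use delete_insn instead when the insn is really dead. */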
4345 /* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
4347 void
4348 add_function_usage_to (rtx call_insn, rtx call_fusage)
4350 gcc_assert (call_insn && CALL_P (call_insn));
4352 /* Put the register usage information on the CALL. If there is already
4353 some usage information, put ours at the end. */
4354 if (CALL_INSN_FUNCTION_USAGE (call_insn))
4356 rtx link;
4358 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
4359 link = XEXP (link, 1))
4362 XEXP (link, 1) = call_fusage;
4364 else
4365 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
4368 /* Delete all insns made since FROM.
4369 FROM becomes the new last instruction. */
4371 void
4372 delete_insns_since (rtx_insn *from)
4374 if (from == 0)
4375 set_first_insn (0);
4376 else
4377 SET_NEXT_INSN (from) = 0;
4378 set_last_insn (from);
4381 /* This function is deprecated, please use sequences instead.
4383 Move a consecutive bunch of insns to a different place in the chain.
4384 The insns to be moved are those between FROM and TO.
4385 They are moved to a new position after the insn AFTER.
4386 AFTER must not be FROM or TO or any insn in between.
4388 This function does not know about SEQUENCEs and hence should not be
4389 called after delay-slot filling has been done. */
4391 void
4392 reorder_insns_nobb (rtx_insn *from, rtx_insn *to, rtx_insn *after)
4394 if (flag_checking)
4396 for (rtx_insn *x = from; x != to; x = NEXT_INSN (x))
4397 gcc_assert (after != x);
4398 gcc_assert (after != to);
4401 /* Splice this bunch out of where it is now. */
4402 if (PREV_INSN (from))
4403 SET_NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
4404 if (NEXT_INSN (to))
4405 SET_PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
4406 if (get_last_insn () == to)
4407 set_last_insn (PREV_INSN (from));
4408 if (get_insns () == from)
4409 set_first_insn (NEXT_INSN (to));
4411 /* Make the new neighbors point to it and it to them. */
4412 if (NEXT_INSN (after))
4413 SET_PREV_INSN (NEXT_INSN (after)) = to;
4415 SET_NEXT_INSN (to) = NEXT_INSN (after);
4416 SET_PREV_INSN (from) = after;
4417 SET_NEXT_INSN (after) = from;
4418 if (after == get_last_insn ())
4419 set_last_insn (to);
4422 /* Same as function above, but take care to update BB boundaries. */
4423 void
4424 reorder_insns (rtx_insn *from, rtx_insn *to, rtx_insn *after)
4426 rtx_insn *prev = PREV_INSN (from);
4427 basic_block bb, bb2;
4429 reorder_insns_nobb (from, to, after);
4431 if (!BARRIER_P (after)
4432 && (bb = BLOCK_FOR_INSN (after)))
4434 rtx_insn *x;
4435 df_set_bb_dirty (bb);
4437 if (!BARRIER_P (from)
4438 && (bb2 = BLOCK_FOR_INSN (from)))
4440 if (BB_END (bb2) == to)
4441 BB_END (bb2) = prev;
4442 df_set_bb_dirty (bb2);
4445 if (BB_END (bb) == after)
4446 BB_END (bb) = to;
4448 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
4449 if (!BARRIER_P (x))
4450 df_insn_change_bb (x, bb);
4455 /* Emit insn(s) of given code and pattern
4456 at a specified place within the doubly-linked list.
4458 All of the emit_foo global entry points accept an object
4459 X which is either an insn list or a PATTERN of a single
4460 instruction.
4462 There are thus a few canonical ways to generate code and
4463 emit it at a specific place in the instruction stream. For
4464 example, consider the instruction named SPOT and the fact that
4465 we would like to emit some instructions before SPOT. We might
4466 do it like this:
4468 start_sequence ();
4469 ... emit the new instructions ...
4470 insns_head = get_insns ();
4471 end_sequence ();
4473 emit_insn_before (insns_head, SPOT);
4475 It used to be common to generate SEQUENCE rtl instead, but that
4476 is a relic of the past which no longer occurs. The reason is that
4477 SEQUENCE rtl results in heavily fragmented RTL memory, since the SEQUENCE
4478 generated would almost certainly die right after it was created. */
4480 static rtx_insn *
4481 emit_pattern_before_noloc (rtx x, rtx_insn *before, rtx_insn *last,
4482 basic_block bb,
4483 rtx_insn *(*make_raw) (rtx))
4485 rtx_insn *insn;
4487 gcc_assert (before);
4489 if (x == NULL_RTX)
4490 return last;
4492 switch (GET_CODE (x))
4494 case DEBUG_INSN:
4495 case INSN:
4496 case JUMP_INSN:
4497 case CALL_INSN:
4498 case CODE_LABEL:
4499 case BARRIER:
4500 case NOTE:
4501 insn = as_a <rtx_insn *> (x);
4502 while (insn)
4504 rtx_insn *next = NEXT_INSN (insn);
4505 add_insn_before (insn, before, bb);
4506 last = insn;
4507 insn = next;
4509 break;
4511 #ifdef ENABLE_RTL_CHECKING
4512 case SEQUENCE:
4513 gcc_unreachable ();
4514 break;
4515 #endif
4517 default:
4518 last = (*make_raw) (x);
4519 add_insn_before (last, before, bb);
4520 break;
4523 return last;
4526 /* Make X be output before the instruction BEFORE. */
4528 rtx_insn *
4529 emit_insn_before_noloc (rtx x, rtx_insn *before, basic_block bb)
4531 return emit_pattern_before_noloc (x, before, before, bb, make_insn_raw);
4534 /* Make an instruction with body X and code JUMP_INSN
4535 and output it before the instruction BEFORE. */
4537 rtx_jump_insn *
4538 emit_jump_insn_before_noloc (rtx x, rtx_insn *before)
4540 return as_a <rtx_jump_insn *> (
4541 emit_pattern_before_noloc (x, before, NULL, NULL,
4542 make_jump_insn_raw));
4545 /* Make an instruction with body X and code CALL_INSN
4546 and output it before the instruction BEFORE. */
4548 rtx_insn *
4549 emit_call_insn_before_noloc (rtx x, rtx_insn *before)
4551 return emit_pattern_before_noloc (x, before, NULL, NULL,
4552 make_call_insn_raw);
4555 /* Make an instruction with body X and code DEBUG_INSN
4556 and output it before the instruction BEFORE. */
4558 rtx_insn *
4559 emit_debug_insn_before_noloc (rtx x, rtx_insn *before)
4561 return emit_pattern_before_noloc (x, before, NULL, NULL,
4562 make_debug_insn_raw);
4565 /* Make an insn of code BARRIER
4566 and output it before the insn BEFORE. */
4568 rtx_barrier *
4569 emit_barrier_before (rtx_insn *before)
4571 rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
4573 INSN_UID (insn) = cur_insn_uid++;
4575 add_insn_before (insn, before, NULL);
4576 return insn;
4579 /* Emit the label LABEL before the insn BEFORE. */
4581 rtx_code_label *
4582 emit_label_before (rtx_code_label *label, rtx_insn *before)
4584 gcc_checking_assert (INSN_UID (label) == 0);
4585 INSN_UID (label) = cur_insn_uid++;
4586 add_insn_before (label, before, NULL);
4587 return label;
4590 /* Helper for emit_insn_after, handles lists of instructions
4591 efficiently. */
4593 static rtx_insn *
4594 emit_insn_after_1 (rtx_insn *first, rtx_insn *after, basic_block bb)
4596 rtx_insn *last;
4597 rtx_insn *after_after;
4598 if (!bb && !BARRIER_P (after))
4599 bb = BLOCK_FOR_INSN (after);
4601 if (bb)
4603 df_set_bb_dirty (bb);
4604 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4605 if (!BARRIER_P (last))
4607 set_block_for_insn (last, bb);
4608 df_insn_rescan (last);
4610 if (!BARRIER_P (last))
4612 set_block_for_insn (last, bb);
4613 df_insn_rescan (last);
4615 if (BB_END (bb) == after)
4616 BB_END (bb) = last;
4618 else
4619 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4620 continue;
4622 after_after = NEXT_INSN (after);
4624 SET_NEXT_INSN (after) = first;
4625 SET_PREV_INSN (first) = after;
4626 SET_NEXT_INSN (last) = after_after;
4627 if (after_after)
4628 SET_PREV_INSN (after_after) = last;
4630 if (after == get_last_insn ())
4631 set_last_insn (last);
4633 return last;
4636 static rtx_insn *
4637 emit_pattern_after_noloc (rtx x, rtx_insn *after, basic_block bb,
4638 rtx_insn *(*make_raw)(rtx))
4640 rtx_insn *last = after;
4642 gcc_assert (after);
4644 if (x == NULL_RTX)
4645 return last;
4647 switch (GET_CODE (x))
4649 case DEBUG_INSN:
4650 case INSN:
4651 case JUMP_INSN:
4652 case CALL_INSN:
4653 case CODE_LABEL:
4654 case BARRIER:
4655 case NOTE:
4656 last = emit_insn_after_1 (as_a <rtx_insn *> (x), after, bb);
4657 break;
4659 #ifdef ENABLE_RTL_CHECKING
4660 case SEQUENCE:
4661 gcc_unreachable ();
4662 break;
4663 #endif
4665 default:
4666 last = (*make_raw) (x);
4667 add_insn_after (last, after, bb);
4668 break;
4671 return last;
4674 /* Make X be output after the insn AFTER and set the BB of insn. If
4675 BB is NULL, an attempt is made to infer the BB from AFTER. */
4677 rtx_insn *
4678 emit_insn_after_noloc (rtx x, rtx_insn *after, basic_block bb)
4680 return emit_pattern_after_noloc (x, after, bb, make_insn_raw);
4684 /* Make an insn of code JUMP_INSN with body X
4685 and output it after the insn AFTER. */
4687 rtx_jump_insn *
4688 emit_jump_insn_after_noloc (rtx x, rtx_insn *after)
4690 return as_a <rtx_jump_insn *> (
4691 emit_pattern_after_noloc (x, after, NULL, make_jump_insn_raw));
4694 /* Make an instruction with body X and code CALL_INSN
4695 and output it after the instruction AFTER. */
4697 rtx_insn *
4698 emit_call_insn_after_noloc (rtx x, rtx_insn *after)
4700 return emit_pattern_after_noloc (x, after, NULL, make_call_insn_raw);
4703 /* Make an instruction with body X and code DEBUG_INSN
4704 and output it after the instruction AFTER. */
4706 rtx_insn *
4707 emit_debug_insn_after_noloc (rtx x, rtx_insn *after)
4709 return emit_pattern_after_noloc (x, after, NULL, make_debug_insn_raw);
4712 /* Make an insn of code BARRIER
4713 and output it after the insn AFTER. */
4715 rtx_barrier *
4716 emit_barrier_after (rtx_insn *after)
4718 rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
4720 INSN_UID (insn) = cur_insn_uid++;
4722 add_insn_after (insn, after, NULL);
4723 return insn;
4726 /* Emit the label LABEL after the insn AFTER. */
4728 rtx_insn *
4729 emit_label_after (rtx_insn *label, rtx_insn *after)
4731 gcc_checking_assert (INSN_UID (label) == 0);
4732 INSN_UID (label) = cur_insn_uid++;
4733 add_insn_after (label, after, NULL);
4734 return label;
4737 /* Notes require a bit of special handling: Some notes need to have their
4738 BLOCK_FOR_INSN set, others should never have it set, and some should
4739 have it set or clear depending on the context. */
4741 /* Return true iff a note of kind SUBTYPE should be emitted with routines
4742 that never set BLOCK_FOR_INSN on NOTE. ON_BB_BOUNDARY_P is true if the
4743 caller is asked to emit a note before BB_HEAD, or after BB_END. */
4745 static bool
4746 note_outside_basic_block_p (enum insn_note subtype, bool on_bb_boundary_p)
4748 switch (subtype)
4750 /* NOTE_INSN_SWITCH_TEXT_SECTIONS only appears between basic blocks. */
4751 case NOTE_INSN_SWITCH_TEXT_SECTIONS:
4752 return true;
4754 /* Notes for var tracking and EH region markers can appear between or
4755 inside basic blocks. If the caller is emitting on the basic block
4756 boundary, do not set BLOCK_FOR_INSN on the new note. */
4757 case NOTE_INSN_VAR_LOCATION:
4758 case NOTE_INSN_EH_REGION_BEG:
4759 case NOTE_INSN_EH_REGION_END:
4760 return on_bb_boundary_p;
4762 /* Otherwise, BLOCK_FOR_INSN must be set. */
4763 default:
4764 return false;
4768 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
4770 rtx_note *
4771 emit_note_after (enum insn_note subtype, rtx_insn *after)
4773 rtx_note *note = make_note_raw (subtype);
4774 basic_block bb = BARRIER_P (after) ? NULL : BLOCK_FOR_INSN (after);
4775 bool on_bb_boundary_p = (bb != NULL && BB_END (bb) == after);
4777 if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4778 add_insn_after_nobb (note, after);
4779 else
4780 add_insn_after (note, after, bb);
4781 return note;
4784 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4786 rtx_note *
4787 emit_note_before (enum insn_note subtype, rtx_insn *before)
4789 rtx_note *note = make_note_raw (subtype);
4790 basic_block bb = BARRIER_P (before) ? NULL : BLOCK_FOR_INSN (before);
4791 bool on_bb_boundary_p = (bb != NULL && BB_HEAD (bb) == before);
4793 if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4794 add_insn_before_nobb (note, before);
4795 else
4796 add_insn_before (note, before, bb);
4797 return note;
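/* For example (illustrative), with MID_BB_INSN standing for an insn strictly
   inside basic block BB:

	emit_note_before (NOTE_INSN_VAR_LOCATION, BB_HEAD (bb));

   leaves BLOCK_FOR_INSN of the new note unset, because the caller is emitting
   on the block boundary, whereas

	emit_note_before (NOTE_INSN_VAR_LOCATION, mid_bb_insn);

   records BB on the note. */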
4800 /* Insert PATTERN after AFTER, setting its INSN_LOCATION to LOC.
4801 MAKE_RAW indicates how to turn PATTERN into a real insn. */
4803 static rtx_insn *
4804 emit_pattern_after_setloc (rtx pattern, rtx_insn *after, location_t loc,
4805 rtx_insn *(*make_raw) (rtx))
4807 rtx_insn *last = emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4809 if (pattern == NULL_RTX || !loc)
4810 return last;
4812 after = NEXT_INSN (after);
4813 while (1)
4815 if (active_insn_p (after)
4816 && !JUMP_TABLE_DATA_P (after) /* FIXME */
4817 && !INSN_LOCATION (after))
4818 INSN_LOCATION (after) = loc;
4819 if (after == last)
4820 break;
4821 after = NEXT_INSN (after);
4823 return last;
4826 /* Insert PATTERN after AFTER. MAKE_RAW indicates how to turn PATTERN
4827 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert after
4828 any DEBUG_INSNs. */
4830 static rtx_insn *
4831 emit_pattern_after (rtx pattern, rtx_insn *after, bool skip_debug_insns,
4832 rtx_insn *(*make_raw) (rtx))
4834 rtx_insn *prev = after;
4836 if (skip_debug_insns)
4837 while (DEBUG_INSN_P (prev))
4838 prev = PREV_INSN (prev);
4840 if (INSN_P (prev))
4841 return emit_pattern_after_setloc (pattern, after, INSN_LOCATION (prev),
4842 make_raw);
4843 else
4844 return emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4847 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4848 rtx_insn *
4849 emit_insn_after_setloc (rtx pattern, rtx_insn *after, location_t loc)
4851 return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw);
4854 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4855 rtx_insn *
4856 emit_insn_after (rtx pattern, rtx_insn *after)
4858 return emit_pattern_after (pattern, after, true, make_insn_raw);
4861 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4862 rtx_jump_insn *
4863 emit_jump_insn_after_setloc (rtx pattern, rtx_insn *after, location_t loc)
4865 return as_a <rtx_jump_insn *> (
4866 emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw));
4869 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4870 rtx_jump_insn *
4871 emit_jump_insn_after (rtx pattern, rtx_insn *after)
4873 return as_a <rtx_jump_insn *> (
4874 emit_pattern_after (pattern, after, true, make_jump_insn_raw));
4877 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4878 rtx_insn *
4879 emit_call_insn_after_setloc (rtx pattern, rtx_insn *after, location_t loc)
4881 return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw);
4884 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4885 rtx_insn *
4886 emit_call_insn_after (rtx pattern, rtx_insn *after)
4888 return emit_pattern_after (pattern, after, true, make_call_insn_raw);
4891 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4892 rtx_insn *
4893 emit_debug_insn_after_setloc (rtx pattern, rtx_insn *after, location_t loc)
4895 return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw);
4898 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4899 rtx_insn *
4900 emit_debug_insn_after (rtx pattern, rtx_insn *after)
4902 return emit_pattern_after (pattern, after, false, make_debug_insn_raw);
4905 /* Insert PATTERN before BEFORE, setting its INSN_LOCATION to LOC.
4906 MAKE_RAW indicates how to turn PATTERN into a real insn. INSNP
4907 indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN,
4908 CALL_INSN, etc. */
4910 static rtx_insn *
4911 emit_pattern_before_setloc (rtx pattern, rtx_insn *before, location_t loc,
4912 bool insnp, rtx_insn *(*make_raw) (rtx))
4914 rtx_insn *first = PREV_INSN (before);
4915 rtx_insn *last = emit_pattern_before_noloc (pattern, before,
4916 insnp ? before : NULL,
4917 NULL, make_raw);
4919 if (pattern == NULL_RTX || !loc)
4920 return last;
4922 if (!first)
4923 first = get_insns ();
4924 else
4925 first = NEXT_INSN (first);
4926 while (1)
4928 if (active_insn_p (first)
4929 && !JUMP_TABLE_DATA_P (first) /* FIXME */
4930 && !INSN_LOCATION (first))
4931 INSN_LOCATION (first) = loc;
4932 if (first == last)
4933 break;
4934 first = NEXT_INSN (first);
4936 return last;
4939 /* Insert PATTERN before BEFORE. MAKE_RAW indicates how to turn PATTERN
4940 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert
4941 before any DEBUG_INSNs. INSNP indicates if PATTERN is meant for an
4942 INSN as opposed to a JUMP_INSN, CALL_INSN, etc. */
4944 static rtx_insn *
4945 emit_pattern_before (rtx pattern, rtx_insn *before, bool skip_debug_insns,
4946 bool insnp, rtx_insn *(*make_raw) (rtx))
4948 rtx_insn *next = before;
4950 if (skip_debug_insns)
4951 while (DEBUG_INSN_P (next))
4952 next = PREV_INSN (next);
4954 if (INSN_P (next))
4955 return emit_pattern_before_setloc (pattern, before, INSN_LOCATION (next),
4956 insnp, make_raw);
4957 else
4958 return emit_pattern_before_noloc (pattern, before,
4959 insnp ? before : NULL,
4960 NULL, make_raw);
4963 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4964 rtx_insn *
4965 emit_insn_before_setloc (rtx pattern, rtx_insn *before, location_t loc)
4967 return emit_pattern_before_setloc (pattern, before, loc, true,
4968 make_insn_raw);
4971 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
4972 rtx_insn *
4973 emit_insn_before (rtx pattern, rtx_insn *before)
4975 return emit_pattern_before (pattern, before, true, true, make_insn_raw);
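/* For instance (illustrative), both calls below insert PATTERN immediately
   before INSN:

	emit_insn_before (pattern, insn);
	emit_insn_before_setloc (pattern, insn, loc);

   The first takes the location from INSN (or from the nearest preceding
   non-debug insn), the second uses the explicit location LOC; in both cases
   only new insns that do not already carry a location receive one. */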
4978 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4979 rtx_jump_insn *
4980 emit_jump_insn_before_setloc (rtx pattern, rtx_insn *before, location_t loc)
4982 return as_a <rtx_jump_insn *> (
4983 emit_pattern_before_setloc (pattern, before, loc, false,
4984 make_jump_insn_raw));
4987 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
4988 rtx_jump_insn *
4989 emit_jump_insn_before (rtx pattern, rtx_insn *before)
4991 return as_a <rtx_jump_insn *> (
4992 emit_pattern_before (pattern, before, true, false,
4993 make_jump_insn_raw));
4996 /* Like emit_call_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4997 rtx_insn *
4998 emit_call_insn_before_setloc (rtx pattern, rtx_insn *before, location_t loc)
5000 return emit_pattern_before_setloc (pattern, before, loc, false,
5001 make_call_insn_raw);
5004 /* Like emit_call_insn_before_noloc,
5005 but set INSN_LOCATION according to BEFORE. */
5006 rtx_insn *
5007 emit_call_insn_before (rtx pattern, rtx_insn *before)
5009 return emit_pattern_before (pattern, before, true, false,
5010 make_call_insn_raw);
5013 /* Like emit_debug_insn_before_noloc, but set INSN_LOCATION according to LOC. */
5014 rtx_insn *
5015 emit_debug_insn_before_setloc (rtx pattern, rtx_insn *before, location_t loc)
5017 return emit_pattern_before_setloc (pattern, before, loc, false,
5018 make_debug_insn_raw);
5021 /* Like emit_debug_insn_before_noloc,
5022 but set INSN_LOCATION according to BEFORE. */
5023 rtx_insn *
5024 emit_debug_insn_before (rtx pattern, rtx_insn *before)
5026 return emit_pattern_before (pattern, before, false, false,
5027 make_debug_insn_raw);
5030 /* Take X and emit it at the end of the doubly-linked
5031 INSN list.
5033 Returns the last insn emitted. */
5035 rtx_insn *
5036 emit_insn (rtx x)
5038 rtx_insn *last = get_last_insn ();
5039 rtx_insn *insn;
5041 if (x == NULL_RTX)
5042 return last;
5044 switch (GET_CODE (x))
5046 case DEBUG_INSN:
5047 case INSN:
5048 case JUMP_INSN:
5049 case CALL_INSN:
5050 case CODE_LABEL:
5051 case BARRIER:
5052 case NOTE:
5053 insn = as_a <rtx_insn *> (x);
5054 while (insn)
5056 rtx_insn *next = NEXT_INSN (insn);
5057 add_insn (insn);
5058 last = insn;
5059 insn = next;
5061 break;
5063 #ifdef ENABLE_RTL_CHECKING
5064 case JUMP_TABLE_DATA:
5065 case SEQUENCE:
5066 gcc_unreachable ();
5067 break;
5068 #endif
5070 default:
5071 last = make_insn_raw (x);
5072 add_insn (last);
5073 break;
5076 return last;
5079 /* Make an insn of code DEBUG_INSN with pattern X
5080 and add it to the end of the doubly-linked list. */
5082 rtx_insn *
5083 emit_debug_insn (rtx x)
5085 rtx_insn *last = get_last_insn ();
5086 rtx_insn *insn;
5088 if (x == NULL_RTX)
5089 return last;
5091 switch (GET_CODE (x))
5093 case DEBUG_INSN:
5094 case INSN:
5095 case JUMP_INSN:
5096 case CALL_INSN:
5097 case CODE_LABEL:
5098 case BARRIER:
5099 case NOTE:
5100 insn = as_a <rtx_insn *> (x);
5101 while (insn)
5103 rtx_insn *next = NEXT_INSN (insn);
5104 add_insn (insn);
5105 last = insn;
5106 insn = next;
5108 break;
5110 #ifdef ENABLE_RTL_CHECKING
5111 case JUMP_TABLE_DATA:
5112 case SEQUENCE:
5113 gcc_unreachable ();
5114 break;
5115 #endif
5117 default:
5118 last = make_debug_insn_raw (x);
5119 add_insn (last);
5120 break;
5123 return last;
5126 /* Make an insn of code JUMP_INSN with pattern X
5127 and add it to the end of the doubly-linked list. */
5129 rtx_insn *
5130 emit_jump_insn (rtx x)
5132 rtx_insn *last = NULL;
5133 rtx_insn *insn;
5135 switch (GET_CODE (x))
5137 case DEBUG_INSN:
5138 case INSN:
5139 case JUMP_INSN:
5140 case CALL_INSN:
5141 case CODE_LABEL:
5142 case BARRIER:
5143 case NOTE:
5144 insn = as_a <rtx_insn *> (x);
5145 while (insn)
5147 rtx_insn *next = NEXT_INSN (insn);
5148 add_insn (insn);
5149 last = insn;
5150 insn = next;
5152 break;
5154 #ifdef ENABLE_RTL_CHECKING
5155 case JUMP_TABLE_DATA:
5156 case SEQUENCE:
5157 gcc_unreachable ();
5158 break;
5159 #endif
5161 default:
5162 last = make_jump_insn_raw (x);
5163 add_insn (last);
5164 break;
5167 return last;
5170 /* Make an insn of code CALL_INSN with pattern X
5171 and add it to the end of the doubly-linked list. */
5173 rtx_insn *
5174 emit_call_insn (rtx x)
5176 rtx_insn *insn;
5178 switch (GET_CODE (x))
5180 case DEBUG_INSN:
5181 case INSN:
5182 case JUMP_INSN:
5183 case CALL_INSN:
5184 case CODE_LABEL:
5185 case BARRIER:
5186 case NOTE:
5187 insn = emit_insn (x);
5188 break;
5190 #ifdef ENABLE_RTL_CHECKING
5191 case SEQUENCE:
5192 case JUMP_TABLE_DATA:
5193 gcc_unreachable ();
5194 break;
5195 #endif
5197 default:
5198 insn = make_call_insn_raw (x);
5199 add_insn (insn);
5200 break;
5203 return insn;
5206 /* Add the label LABEL to the end of the doubly-linked list. */
5208 rtx_code_label *
5209 emit_label (rtx uncast_label)
5211 rtx_code_label *label = as_a <rtx_code_label *> (uncast_label);
5213 gcc_checking_assert (INSN_UID (label) == 0);
5214 INSN_UID (label) = cur_insn_uid++;
5215 add_insn (label);
5216 return label;
5219 /* Make an insn of code JUMP_TABLE_DATA
5220 and add it to the end of the doubly-linked list. */
5222 rtx_jump_table_data *
5223 emit_jump_table_data (rtx table)
5225 rtx_jump_table_data *jump_table_data =
5226 as_a <rtx_jump_table_data *> (rtx_alloc (JUMP_TABLE_DATA));
5227 INSN_UID (jump_table_data) = cur_insn_uid++;
5228 PATTERN (jump_table_data) = table;
5229 BLOCK_FOR_INSN (jump_table_data) = NULL;
5230 add_insn (jump_table_data);
5231 return jump_table_data;
5234 /* Make an insn of code BARRIER
5235 and add it to the end of the doubly-linked list. */
5237 rtx_barrier *
5238 emit_barrier (void)
5240 rtx_barrier *barrier = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
5241 INSN_UID (barrier) = cur_insn_uid++;
5242 add_insn (barrier);
5243 return barrier;
5246 /* Emit a copy of note ORIG. */
5248 rtx_note *
5249 emit_note_copy (rtx_note *orig)
5251 enum insn_note kind = (enum insn_note) NOTE_KIND (orig);
5252 rtx_note *note = make_note_raw (kind);
5253 NOTE_DATA (note) = NOTE_DATA (orig);
5254 add_insn (note);
5255 return note;
5258 /* Make an insn of code NOTE with kind KIND
5259 and add it to the end of the doubly-linked list. */
5261 rtx_note *
5262 emit_note (enum insn_note kind)
5264 rtx_note *note = make_note_raw (kind);
5265 add_insn (note);
5266 return note;
5269 /* Emit a clobber of lvalue X. */
5271 rtx_insn *
5272 emit_clobber (rtx x)
5274 /* CONCATs should not appear in the insn stream. */
5275 if (GET_CODE (x) == CONCAT)
5277 emit_clobber (XEXP (x, 0));
5278 return emit_clobber (XEXP (x, 1));
5280 return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
5283 /* Return a sequence of insns to clobber lvalue X. */
5285 rtx_insn *
5286 gen_clobber (rtx x)
5288 rtx_insn *seq;
5290 start_sequence ();
5291 emit_clobber (x);
5292 seq = get_insns ();
5293 end_sequence ();
5294 return seq;
5297 /* Emit a use of rvalue X. */
5299 rtx_insn *
5300 emit_use (rtx x)
5302 /* CONCATs should not appear in the insn stream. */
5303 if (GET_CODE (x) == CONCAT)
5305 emit_use (XEXP (x, 0));
5306 return emit_use (XEXP (x, 1));
5308 return emit_insn (gen_rtx_USE (VOIDmode, x));
5311 /* Return a sequence of insns to use rvalue X. */
5313 rtx_insn *
5314 gen_use (rtx x)
5316 rtx_insn *seq;
5318 start_sequence ();
5319 emit_use (x);
5320 seq = get_insns ();
5321 end_sequence ();
5322 return seq;
5325 /* Notes like REG_EQUAL and REG_EQUIV refer to a set in an instruction.
5326 Return the set in INSN that such notes describe, or NULL if the notes
5327 have no meaning for INSN. */
5330 set_for_reg_notes (rtx insn)
5332 rtx pat, reg;
5334 if (!INSN_P (insn))
5335 return NULL_RTX;
5337 pat = PATTERN (insn);
5338 if (GET_CODE (pat) == PARALLEL)
5340 /* We do not use single_set because that ignores SETs of unused
5341 registers. REG_EQUAL and REG_EQUIV notes really do require the
5342 PARALLEL to have a single SET. */
5343 if (multiple_sets (insn))
5344 return NULL_RTX;
5345 pat = XVECEXP (pat, 0, 0);
5348 if (GET_CODE (pat) != SET)
5349 return NULL_RTX;
5351 reg = SET_DEST (pat);
5353 /* Notes apply to the contents of a STRICT_LOW_PART. */
5354 if (GET_CODE (reg) == STRICT_LOW_PART
5355 || GET_CODE (reg) == ZERO_EXTRACT)
5356 reg = XEXP (reg, 0);
5358 /* Check that we have a register. */
5359 if (!(REG_P (reg) || GET_CODE (reg) == SUBREG))
5360 return NULL_RTX;
5362 return pat;
5365 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
5366 note of this type already exists, remove it first. */
5369 set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
5371 rtx note = find_reg_note (insn, kind, NULL_RTX);
5373 switch (kind)
5375 case REG_EQUAL:
5376 case REG_EQUIV:
5377 /* We need to support the REG_EQUAL on USE trick of find_reloads. */
5378 if (!set_for_reg_notes (insn) && GET_CODE (PATTERN (insn)) != USE)
5379 return NULL_RTX;
5381 /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
5382 It serves no useful purpose and breaks eliminate_regs. */
5383 if (GET_CODE (datum) == ASM_OPERANDS)
5384 return NULL_RTX;
5386 /* Notes with side effects are dangerous. Even if the side-effect
5387 initially mirrors one in PATTERN (INSN), later optimizations
5388 might alter the way that the final register value is calculated
5389 and so move or alter the side-effect in some way. The note would
5390 then no longer be a valid substitution for SET_SRC. */
5391 if (side_effects_p (datum))
5392 return NULL_RTX;
5393 break;
5395 default:
5396 break;
5399 if (note)
5400 XEXP (note, 0) = datum;
5401 else
5403 add_reg_note (insn, kind, datum);
5404 note = REG_NOTES (insn);
5407 switch (kind)
5409 case REG_EQUAL:
5410 case REG_EQUIV:
5411 df_notes_rescan (as_a <rtx_insn *> (insn));
5412 break;
5413 default:
5414 break;
5417 return note;
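/* For example (an illustrative sketch, not a quotation of any particular
   caller; LAST, MODE and OP0 are placeholders), an expander that open-codes
   a multiplication into shifts and adds can record the simpler equivalent
   value on the final insn:

	set_unique_reg_note (last, REG_EQUAL,
			     gen_rtx_MULT (mode, op0, GEN_INT (9)));

   Later passes may then substitute the MULT form where it is cheaper. */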
5420 /* Like set_unique_reg_note, but don't do anything unless INSN sets DST. */
5422 set_dst_reg_note (rtx insn, enum reg_note kind, rtx datum, rtx dst)
5424 rtx set = set_for_reg_notes (insn);
5426 if (set && SET_DEST (set) == dst)
5427 return set_unique_reg_note (insn, kind, datum);
5428 return NULL_RTX;
5431 /* Emit the rtl pattern X as an appropriate kind of insn. Also emit a
5432 following barrier if the instruction needs one and if ALLOW_BARRIER_P
5433 is true.
5435 If X is a label, it is simply added into the insn chain. */
5437 rtx_insn *
5438 emit (rtx x, bool allow_barrier_p)
5440 enum rtx_code code = classify_insn (x);
5442 switch (code)
5444 case CODE_LABEL:
5445 return emit_label (x);
5446 case INSN:
5447 return emit_insn (x);
5448 case JUMP_INSN:
5450 rtx_insn *insn = emit_jump_insn (x);
5451 if (allow_barrier_p
5452 && (any_uncondjump_p (insn) || GET_CODE (x) == RETURN))
5453 return emit_barrier ();
5454 return insn;
5456 case CALL_INSN:
5457 return emit_call_insn (x);
5458 case DEBUG_INSN:
5459 return emit_debug_insn (x);
5460 default:
5461 gcc_unreachable ();
5465 /* Space for free sequence stack entries. */
5466 static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
5468 /* Begin emitting insns to a sequence. If this sequence will contain
5469 something that might cause the compiler to pop arguments to function
5470 calls (because those pops have previously been deferred; see
5471 INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
5472 before calling this function. That will ensure that the deferred
5473 pops are not accidentally emitted in the middle of this sequence. */
5475 void
5476 start_sequence (void)
5478 struct sequence_stack *tem;
5480 if (free_sequence_stack != NULL)
5482 tem = free_sequence_stack;
5483 free_sequence_stack = tem->next;
5485 else
5486 tem = ggc_alloc<sequence_stack> ();
5488 tem->next = get_current_sequence ()->next;
5489 tem->first = get_insns ();
5490 tem->last = get_last_insn ();
5491 get_current_sequence ()->next = tem;
5493 set_first_insn (0);
5494 set_last_insn (0);
5497 /* Set up the insn chain starting with FIRST as the current sequence,
5498 saving the previously current one. See the documentation for
5499 start_sequence for more information about how to use this function. */
5501 void
5502 push_to_sequence (rtx_insn *first)
5504 rtx_insn *last;
5506 start_sequence ();
5508 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last))
5511 set_first_insn (first);
5512 set_last_insn (last);
5515 /* Like push_to_sequence, but take the last insn as an argument to avoid
5516 looping through the list. */
5518 void
5519 push_to_sequence2 (rtx_insn *first, rtx_insn *last)
5521 start_sequence ();
5523 set_first_insn (first);
5524 set_last_insn (last);
5527 /* Set up the outer-level insn chain
5528 as the current sequence, saving the previously current one. */
5530 void
5531 push_topmost_sequence (void)
5533 struct sequence_stack *top;
5535 start_sequence ();
5537 top = get_topmost_sequence ();
5538 set_first_insn (top->first);
5539 set_last_insn (top->last);
5542 /* After emitting to the outer-level insn chain, update the outer-level
5543 insn chain, and restore the previous saved state. */
5545 void
5546 pop_topmost_sequence (void)
5548 struct sequence_stack *top;
5550 top = get_topmost_sequence ();
5551 top->first = get_insns ();
5552 top->last = get_last_insn ();
5554 end_sequence ();
5557 /* After emitting to a sequence, restore previous saved state.
5559 To get the contents of the sequence just made, you must call
5560 `get_insns' *before* calling here.
5562 If the compiler might have deferred popping arguments while
5563 generating this sequence, and this sequence will not be immediately
5564 inserted into the instruction stream, use do_pending_stack_adjust
5565 before calling get_insns. That will ensure that the deferred
5566 pops are inserted into this sequence, and not into some random
5567 location in the instruction stream. See INHIBIT_DEFER_POP for more
5568 information about deferred popping of arguments. */
5570 void
5571 end_sequence (void)
5573 struct sequence_stack *tem = get_current_sequence ()->next;
5575 set_first_insn (tem->first);
5576 set_last_insn (tem->last);
5577 get_current_sequence ()->next = tem->next;
5579 memset (tem, 0, sizeof (*tem));
5580 tem->next = free_sequence_stack;
5581 free_sequence_stack = tem;
5584 /* Return 1 if currently emitting into a sequence. */
5587 in_sequence_p (void)
5589 return get_current_sequence ()->next != 0;
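/* Sequences nest, so an illustrative caller can do:

	start_sequence ();		outer sequence
	  ...
	  start_sequence ();		inner sequence
	  ...
	  inner = get_insns ();
	  end_sequence ();
	  emit_insn (inner);		lands in the outer sequence
	  ...
	outer = get_insns ();
	end_sequence ();

   in_sequence_p () is true anywhere between the outermost start_sequence
   and its matching end_sequence. */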
5592 /* Put the various virtual registers into REGNO_REG_RTX. */
5594 static void
5595 init_virtual_regs (void)
5597 regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
5598 regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
5599 regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
5600 regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
5601 regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
5602 regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
5603 = virtual_preferred_stack_boundary_rtx;
5607 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
5608 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
5609 static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
5610 static int copy_insn_n_scratches;
5612 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5613 copied an ASM_OPERANDS.
5614 In that case, it is the original input-operand vector. */
5615 static rtvec orig_asm_operands_vector;
5617 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5618 copied an ASM_OPERANDS.
5619 In that case, it is the copied input-operand vector. */
5620 static rtvec copy_asm_operands_vector;
5622 /* Likewise for the constraints vector. */
5623 static rtvec orig_asm_constraints_vector;
5624 static rtvec copy_asm_constraints_vector;
5626 /* Recursively create a new copy of an rtx for copy_insn.
5627 This function differs from copy_rtx in that it handles SCRATCHes and
5628 ASM_OPERANDs properly.
5629 Normally, this function is not used directly; use copy_insn as front end.
5630 However, you could first copy an insn pattern with copy_insn and then use
5631 this function afterwards to properly copy any REG_NOTEs containing
5632 SCRATCHes. */
5635 copy_insn_1 (rtx orig)
5637 rtx copy;
5638 int i, j;
5639 RTX_CODE code;
5640 const char *format_ptr;
5642 if (orig == NULL)
5643 return NULL;
5645 code = GET_CODE (orig);
5647 switch (code)
5649 case REG:
5650 case DEBUG_EXPR:
5651 CASE_CONST_ANY:
5652 case SYMBOL_REF:
5653 case CODE_LABEL:
5654 case PC:
5655 case RETURN:
5656 case SIMPLE_RETURN:
5657 return orig;
5658 case CLOBBER:
5659 /* Share clobbers of hard registers, but do not share pseudo reg
5660 clobbers or clobbers of hard registers that originated as pseudos.
5661 This is needed to allow safe register renaming. */
5662 if (REG_P (XEXP (orig, 0))
5663 && HARD_REGISTER_NUM_P (REGNO (XEXP (orig, 0)))
5664 && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (orig, 0))))
5665 return orig;
5666 break;
5668 case SCRATCH:
5669 for (i = 0; i < copy_insn_n_scratches; i++)
5670 if (copy_insn_scratch_in[i] == orig)
5671 return copy_insn_scratch_out[i];
5672 break;
5674 case CONST:
5675 if (shared_const_p (orig))
5676 return orig;
5677 break;
5679 /* A MEM with a constant address is not sharable. The problem is that
5680 the constant address may need to be reloaded. If the mem is shared,
5681 then reloading one copy of this mem will cause all copies to appear
5682 to have been reloaded. */
5684 default:
5685 break;
5688 /* Copy the various flags, fields, and other information. We assume
5689 that all fields need copying, and then clear the fields that should
5690 not be copied. That is the sensible default behavior, and forces
5691 us to explicitly document why we are *not* copying a flag. */
5692 copy = shallow_copy_rtx (orig);
5694 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
5695 if (INSN_P (orig))
5697 RTX_FLAG (copy, jump) = 0;
5698 RTX_FLAG (copy, call) = 0;
5699 RTX_FLAG (copy, frame_related) = 0;
5702 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
5704 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
5705 switch (*format_ptr++)
5707 case 'e':
5708 if (XEXP (orig, i) != NULL)
5709 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
5710 break;
5712 case 'E':
5713 case 'V':
5714 if (XVEC (orig, i) == orig_asm_constraints_vector)
5715 XVEC (copy, i) = copy_asm_constraints_vector;
5716 else if (XVEC (orig, i) == orig_asm_operands_vector)
5717 XVEC (copy, i) = copy_asm_operands_vector;
5718 else if (XVEC (orig, i) != NULL)
5720 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
5721 for (j = 0; j < XVECLEN (copy, i); j++)
5722 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
5724 break;
5726 case 't':
5727 case 'w':
5728 case 'i':
5729 case 'p':
5730 case 's':
5731 case 'S':
5732 case 'u':
5733 case '0':
5734 /* These are left unchanged. */
5735 break;
5737 default:
5738 gcc_unreachable ();
5741 if (code == SCRATCH)
5743 i = copy_insn_n_scratches++;
5744 gcc_assert (i < MAX_RECOG_OPERANDS);
5745 copy_insn_scratch_in[i] = orig;
5746 copy_insn_scratch_out[i] = copy;
5748 else if (code == ASM_OPERANDS)
5750 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
5751 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
5752 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
5753 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
5756 return copy;
5759 /* Create a new copy of an rtx.
5760 This function differs from copy_rtx in that it handles SCRATCHes and
5761 ASM_OPERANDs properly.
5762 INSN doesn't really have to be a full INSN; it could be just the
5763 pattern. */
5765 copy_insn (rtx insn)
5767 copy_insn_n_scratches = 0;
5768 orig_asm_operands_vector = 0;
5769 orig_asm_constraints_vector = 0;
5770 copy_asm_operands_vector = 0;
5771 copy_asm_constraints_vector = 0;
5772 return copy_insn_1 (insn);
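/* So a pass that needs a copied REG_NOTE to share SCRATCHes with the copied
   pattern can do (illustrative only):

	rtx pat_copy = copy_insn (PATTERN (insn));
	rtx note_copy = copy_insn_1 (XEXP (note, 0));

   The second call reuses the SCRATCH map left behind by the first, as
   described above copy_insn_1. */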
5775 /* Return a copy of INSN that can be used in a SEQUENCE delay slot,
5776 on the assumption that INSN itself remains in its original place. */
5778 rtx_insn *
5779 copy_delay_slot_insn (rtx_insn *insn)
5781 /* Copy INSN with its rtx_code, all its notes, location etc. */
5782 insn = as_a <rtx_insn *> (copy_rtx (insn));
5783 INSN_UID (insn) = cur_insn_uid++;
5784 return insn;
5787 /* Initialize data structures and variables in this file
5788 before generating rtl for each function. */
5790 void
5791 init_emit (void)
5793 set_first_insn (NULL);
5794 set_last_insn (NULL);
5795 if (param_min_nondebug_insn_uid)
5796 cur_insn_uid = param_min_nondebug_insn_uid;
5797 else
5798 cur_insn_uid = 1;
5799 cur_debug_insn_uid = 1;
5800 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
5801 first_label_num = label_num;
5802 get_current_sequence ()->next = NULL;
5804 /* Init the tables that describe all the pseudo regs. */
5806 crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
5808 crtl->emit.regno_pointer_align
5809 = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);
5811 regno_reg_rtx
5812 = ggc_cleared_vec_alloc<rtx> (crtl->emit.regno_pointer_align_length);
5814 /* Put copies of all the hard registers into regno_reg_rtx. */
5815 memcpy (regno_reg_rtx,
5816 initial_regno_reg_rtx,
5817 FIRST_PSEUDO_REGISTER * sizeof (rtx));
5819 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
5820 init_virtual_regs ();
5822 /* Indicate that the virtual registers and stack locations are
5823 all pointers. */
5824 REG_POINTER (stack_pointer_rtx) = 1;
5825 REG_POINTER (frame_pointer_rtx) = 1;
5826 REG_POINTER (hard_frame_pointer_rtx) = 1;
5827 REG_POINTER (arg_pointer_rtx) = 1;
5829 REG_POINTER (virtual_incoming_args_rtx) = 1;
5830 REG_POINTER (virtual_stack_vars_rtx) = 1;
5831 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5832 REG_POINTER (virtual_outgoing_args_rtx) = 1;
5833 REG_POINTER (virtual_cfa_rtx) = 1;
5835 #ifdef STACK_BOUNDARY
5836 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5837 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5838 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5839 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5841 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5842 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5843 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5844 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5846 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
5847 #endif
5849 #ifdef INIT_EXPANDERS
5850 INIT_EXPANDERS;
5851 #endif
5854 /* Return the value of element I of CONST_VECTOR X as a wide_int. */
5856 wide_int
5857 const_vector_int_elt (const_rtx x, unsigned int i)
5859 /* First handle elements that are directly encoded. */
5860 machine_mode elt_mode = GET_MODE_INNER (GET_MODE (x));
5861 if (i < (unsigned int) XVECLEN (x, 0))
5862 return rtx_mode_t (CONST_VECTOR_ENCODED_ELT (x, i), elt_mode);
5864 /* Identify the pattern that contains element I and work out the index of
5865 the last encoded element for that pattern. */
5866 unsigned int encoded_nelts = const_vector_encoded_nelts (x);
5867 unsigned int npatterns = CONST_VECTOR_NPATTERNS (x);
5868 unsigned int count = i / npatterns;
5869 unsigned int pattern = i % npatterns;
5870 unsigned int final_i = encoded_nelts - npatterns + pattern;
5872 /* If there are no steps, the final encoded value is the right one. */
5873 if (!CONST_VECTOR_STEPPED_P (x))
5874 return rtx_mode_t (CONST_VECTOR_ENCODED_ELT (x, final_i), elt_mode);
5876 /* Otherwise work out the value from the last two encoded elements. */
5877 rtx v1 = CONST_VECTOR_ENCODED_ELT (x, final_i - npatterns);
5878 rtx v2 = CONST_VECTOR_ENCODED_ELT (x, final_i);
5879 wide_int diff = wi::sub (rtx_mode_t (v2, elt_mode),
5880 rtx_mode_t (v1, elt_mode));
5881 return wi::add (rtx_mode_t (v2, elt_mode), (count - 2) * diff);
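/* Worked example (illustrative): a stepped V8SI constant
   { 1, 10, 3, 12, 5, 14, 7, 16 } is encoded with NPATTERNS == 2 and three
   elements per pattern, so the encoded elements are { 1, 10, 3, 12, 5, 14 }.
   For I == 7: pattern == 1, count == 3, final_i == 5, hence v1 == 12,
   v2 == 14, diff == 2, and the result is 14 + (3 - 2) * 2 == 16, which is
   indeed element 7 of the full vector. */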
5884 /* Return the value of element I of CONST_VECTOR X. */
5887 const_vector_elt (const_rtx x, unsigned int i)
5889 /* First handle elements that are directly encoded. */
5890 if (i < (unsigned int) XVECLEN (x, 0))
5891 return CONST_VECTOR_ENCODED_ELT (x, i);
5893 /* If there are no steps, the final encoded value is the right one. */
5894 if (!CONST_VECTOR_STEPPED_P (x))
5896 /* Identify the pattern that contains element I and work out the index of
5897 the last encoded element for that pattern. */
5898 unsigned int encoded_nelts = const_vector_encoded_nelts (x);
5899 unsigned int npatterns = CONST_VECTOR_NPATTERNS (x);
5900 unsigned int pattern = i % npatterns;
5901 unsigned int final_i = encoded_nelts - npatterns + pattern;
5902 return CONST_VECTOR_ENCODED_ELT (x, final_i);
5905 /* Otherwise work out the value from the last two encoded elements. */
5906 return immed_wide_int_const (const_vector_int_elt (x, i),
5907 GET_MODE_INNER (GET_MODE (x)));
5910 /* Return true if X is a valid element for a CONST_VECTOR of the given
5911 mode. */
5913 bool
5914 valid_for_const_vector_p (machine_mode, rtx x)
5916 return (CONST_SCALAR_INT_P (x)
5917 || CONST_POLY_INT_P (x)
5918 || CONST_DOUBLE_AS_FLOAT_P (x)
5919 || CONST_FIXED_P (x));
5922 /* Generate a vector constant of mode MODE in which every element has
5923 value ELT. */
5926 gen_const_vec_duplicate (machine_mode mode, rtx elt)
5928 rtx_vector_builder builder (mode, 1, 1);
5929 builder.quick_push (elt);
5930 return builder.build ();
5933 /* Return a vector rtx of mode MODE in which every element has value X.
5934 The result will be a constant if X is constant. */
5937 gen_vec_duplicate (machine_mode mode, rtx x)
5939 if (valid_for_const_vector_p (mode, x))
5940 return gen_const_vec_duplicate (mode, x);
5941 return gen_rtx_VEC_DUPLICATE (mode, x);
5944 /* A subroutine of const_vec_series_p that handles the case in which:
5946 (GET_CODE (X) == CONST_VECTOR
5947 && CONST_VECTOR_NPATTERNS (X) == 1
5948 && !CONST_VECTOR_DUPLICATE_P (X))
5950 is known to hold. */
5952 bool
5953 const_vec_series_p_1 (const_rtx x, rtx *base_out, rtx *step_out)
5955 /* Stepped sequences are only defined for integers, to avoid specifying
5956 rounding behavior. */
5957 if (GET_MODE_CLASS (GET_MODE (x)) != MODE_VECTOR_INT)
5958 return false;
5960 /* A non-duplicated vector with two elements can always be seen as a
5961 series with a nonzero step. Longer vectors must have a stepped
5962 encoding. */
5963 if (maybe_ne (CONST_VECTOR_NUNITS (x), 2)
5964 && !CONST_VECTOR_STEPPED_P (x))
5965 return false;
5967 /* Calculate the step between the first and second elements. */
5968 scalar_mode inner = GET_MODE_INNER (GET_MODE (x));
5969 rtx base = CONST_VECTOR_ELT (x, 0);
5970 rtx step = simplify_binary_operation (MINUS, inner,
5971 CONST_VECTOR_ENCODED_ELT (x, 1), base);
5972 if (rtx_equal_p (step, CONST0_RTX (inner)))
5973 return false;
5975 /* If we have a stepped encoding, check that the step between the
5976 second and third elements is the same as STEP. */
5977 if (CONST_VECTOR_STEPPED_P (x))
5979 rtx diff = simplify_binary_operation (MINUS, inner,
5980 CONST_VECTOR_ENCODED_ELT (x, 2),
5981 CONST_VECTOR_ENCODED_ELT (x, 1));
5982 if (!rtx_equal_p (step, diff))
5983 return false;
5986 *base_out = base;
5987 *step_out = step;
5988 return true;
5991 /* Generate a vector constant of mode MODE in which element I has
5992 the value BASE + I * STEP. */
5995 gen_const_vec_series (machine_mode mode, rtx base, rtx step)
5997 gcc_assert (valid_for_const_vector_p (mode, base)
5998 && valid_for_const_vector_p (mode, step));
6000 rtx_vector_builder builder (mode, 1, 3);
6001 builder.quick_push (base);
6002 for (int i = 1; i < 3; ++i)
6003 builder.quick_push (simplify_gen_binary (PLUS, GET_MODE_INNER (mode),
6004 builder[i - 1], step));
6005 return builder.build ();
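/* For example (illustrative, on a target that has V4SImode),
   gen_const_vec_series (V4SImode, const0_rtx, const1_rtx) pushes the three
   encoded elements { 0, 1, 2 } into a single-pattern builder, and the
   stepped encoding then represents the series { 0, 1, 2, 3 }. */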
6008 /* Generate a vector of mode MODE in which element I has the value
6009 BASE + I * STEP. The result will be a constant if BASE and STEP
6010 are both constants. */
6013 gen_vec_series (machine_mode mode, rtx base, rtx step)
6015 if (step == const0_rtx)
6016 return gen_vec_duplicate (mode, base);
6017 if (valid_for_const_vector_p (mode, base)
6018 && valid_for_const_vector_p (mode, step))
6019 return gen_const_vec_series (mode, base, step);
6020 return gen_rtx_VEC_SERIES (mode, base, step);
6023 /* Generate a new vector constant for mode MODE and constant value
6024 CONSTANT. */
6026 static rtx
6027 gen_const_vector (machine_mode mode, int constant)
6029 machine_mode inner = GET_MODE_INNER (mode);
6031 gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));
6033 rtx el = const_tiny_rtx[constant][(int) inner];
6034 gcc_assert (el);
6036 return gen_const_vec_duplicate (mode, el);
6039 /* Generate a vector like gen_rtx_raw_CONST_VEC, but use the zero vector when
6040 all elements are zero, and the one vector when all elements are one. */
6042 gen_rtx_CONST_VECTOR (machine_mode mode, rtvec v)
6044 gcc_assert (known_eq (GET_MODE_NUNITS (mode), GET_NUM_ELEM (v)));
6046 /* If the values are all the same, check to see if we can use one of the
6047 standard constant vectors. */
6048 if (rtvec_all_equal_p (v))
6049 return gen_const_vec_duplicate (mode, RTVEC_ELT (v, 0));
6051 unsigned int nunits = GET_NUM_ELEM (v);
6052 rtx_vector_builder builder (mode, nunits, 1);
6053 for (unsigned int i = 0; i < nunits; ++i)
6054 builder.quick_push (RTVEC_ELT (v, i));
6055 return builder.build (v);
6058 /* Initialize global register information required by all functions. */
6060 void
6061 init_emit_regs (void)
6063 int i;
6064 machine_mode mode;
6065 mem_attrs *attrs;
6067 /* Reset register attributes. */
6068 reg_attrs_htab->empty ();
6070 /* We need reg_raw_mode, so initialize the modes now. */
6071 init_reg_modes_target ();
6073 /* Assign register numbers to the globally defined register rtx. */
6074 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
6075 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
6076 hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
6077 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
6078 virtual_incoming_args_rtx =
6079 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
6080 virtual_stack_vars_rtx =
6081 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
6082 virtual_stack_dynamic_rtx =
6083 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
6084 virtual_outgoing_args_rtx =
6085 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
6086 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
6087 virtual_preferred_stack_boundary_rtx =
6088 gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);
6090 /* Initialize RTL for commonly used hard registers. These are
6091 copied into regno_reg_rtx as we begin to compile each function. */
6092 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
6093 initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
6095 #ifdef RETURN_ADDRESS_POINTER_REGNUM
6096 return_address_pointer_rtx
6097 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
6098 #endif
6100 pic_offset_table_rtx = NULL_RTX;
6101 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
6102 pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
6104 /* Process stack-limiting command-line options. */
6105 if (opt_fstack_limit_symbol_arg != NULL)
6106 stack_limit_rtx
6107 = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (opt_fstack_limit_symbol_arg));
6108 if (opt_fstack_limit_register_no >= 0)
6109 stack_limit_rtx = gen_rtx_REG (Pmode, opt_fstack_limit_register_no);
6111 for (i = 0; i < (int) MAX_MACHINE_MODE; i++)
6113 mode = (machine_mode) i;
6114 attrs = ggc_cleared_alloc<mem_attrs> ();
6115 attrs->align = BITS_PER_UNIT;
6116 attrs->addrspace = ADDR_SPACE_GENERIC;
6117 if (mode != BLKmode && mode != VOIDmode)
6119 attrs->size_known_p = true;
6120 attrs->size = GET_MODE_SIZE (mode);
6121 if (STRICT_ALIGNMENT)
6122 attrs->align = GET_MODE_ALIGNMENT (mode);
6124 mode_mem_attrs[i] = attrs;
6127 split_branch_probability = profile_probability::uninitialized ();
6130 /* Initialize global machine_mode variables. */
6132 void
6133 init_derived_machine_modes (void)
6135 opt_scalar_int_mode mode_iter, opt_byte_mode, opt_word_mode;
6136 FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_INT)
6138 scalar_int_mode mode = mode_iter.require ();
6140 if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
6141 && !opt_byte_mode.exists ())
6142 opt_byte_mode = mode;
6144 if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
6145 && !opt_word_mode.exists ())
6146 opt_word_mode = mode;
6149 byte_mode = opt_byte_mode.require ();
6150 word_mode = opt_word_mode.require ();
6151 ptr_mode = as_a <scalar_int_mode>
6152 (mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0).require ());
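/* On a typical 64-bit target with 8-bit units, BITS_PER_WORD == 64 and
   POINTER_SIZE == 64, this picks byte_mode == QImode, word_mode == DImode
   and ptr_mode == DImode (illustrative only; the exact modes chosen are
   entirely target-dependent). */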
/* Create some permanent unique rtl objects shared between all functions.  */

void
init_emit_once (void)
{
  int i;
  machine_mode mode;
  scalar_float_mode double_mode;
  opt_scalar_mode smode_iter;

  /* Initialize the CONST_INT, CONST_WIDE_INT, CONST_DOUBLE,
     CONST_FIXED, and memory attribute hash tables.  */
  const_int_htab = hash_table<const_int_hasher>::create_ggc (37);

#if TARGET_SUPPORTS_WIDE_INT
  const_wide_int_htab = hash_table<const_wide_int_hasher>::create_ggc (37);
#endif
  const_double_htab = hash_table<const_double_hasher>::create_ggc (37);

  if (NUM_POLY_INT_COEFFS > 1)
    const_poly_int_htab = hash_table<const_poly_int_hasher>::create_ggc (37);

  const_fixed_htab = hash_table<const_fixed_hasher>::create_ggc (37);

  reg_attrs_htab = hash_table<reg_attr_hasher>::create_ggc (37);

#ifdef INIT_EXPANDERS
  /* This is to initialize {init|mark|free}_machine_status before the first
     call to push_function_context_to.  This is needed by the Chill front
     end which calls push_function_context_to before the first call to
     init_function_start.  */
  INIT_EXPANDERS;
#endif

  /* Create the unique rtx's for certain rtx codes and operand values.  */

  /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
     tries to use these variables.  */
  for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
    const_int_rtx[i + MAX_SAVED_CONST_INT] =
      gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);
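
  /* STORE_FLAG_VALUE defaults to 1, so on most targets const_true_rtx
     set up below is simply the shared CONST_INT for 1 (const1_rtx).  */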
  if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
      && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
    const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
  else
    const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);

  double_mode = float_mode_for_size (DOUBLE_TYPE_SIZE).require ();

  real_from_integer (&dconst0, double_mode, 0, SIGNED);
  real_from_integer (&dconst1, double_mode, 1, SIGNED);
  real_from_integer (&dconst2, double_mode, 2, SIGNED);
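
  /* -1.0 is 1.0 with the sign bit set; 0.5 is 1.0 with its exponent
     decremented by one.  */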
  dconstm1 = dconst1;
  dconstm1.sign = 1;

  dconsthalf = dconst1;
  SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);

  real_inf (&dconstinf);
  real_inf (&dconstninf, true);

  for (i = 0; i < 3; i++)
    {
      const REAL_VALUE_TYPE *const r =
        (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);

      FOR_EACH_MODE_IN_CLASS (mode, MODE_FLOAT)
        const_tiny_rtx[i][(int) mode] =
          const_double_from_real_value (*r, mode);

      FOR_EACH_MODE_IN_CLASS (mode, MODE_DECIMAL_FLOAT)
        const_tiny_rtx[i][(int) mode] =
          const_double_from_real_value (*r, mode);

      const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);

      FOR_EACH_MODE_IN_CLASS (mode, MODE_INT)
        const_tiny_rtx[i][(int) mode] = GEN_INT (i);

      for (mode = MIN_MODE_PARTIAL_INT;
           mode <= MAX_MODE_PARTIAL_INT;
           mode = (machine_mode)((int)(mode) + 1))
        const_tiny_rtx[i][(int) mode] = GEN_INT (i);
    }

  const_tiny_rtx[3][(int) VOIDmode] = constm1_rtx;
  FOR_EACH_MODE_IN_CLASS (mode, MODE_INT)
    const_tiny_rtx[3][(int) mode] = constm1_rtx;

  /* For BImode, 1 and -1 are unsigned and signed interpretations
     of the same value.  */
  for (mode = MIN_MODE_BOOL;
       mode <= MAX_MODE_BOOL;
       mode = (machine_mode)((int)(mode) + 1))
    {
      const_tiny_rtx[0][(int) mode] = const0_rtx;
      if (mode == BImode)
        {
          const_tiny_rtx[1][(int) mode] = const_true_rtx;
          const_tiny_rtx[3][(int) mode] = const_true_rtx;
        }
      else
        {
          const_tiny_rtx[1][(int) mode] = const1_rtx;
          const_tiny_rtx[3][(int) mode] = constm1_rtx;
        }
    }

  for (mode = MIN_MODE_PARTIAL_INT;
       mode <= MAX_MODE_PARTIAL_INT;
       mode = (machine_mode)((int)(mode) + 1))
    const_tiny_rtx[3][(int) mode] = constm1_rtx;
  FOR_EACH_MODE_IN_CLASS (mode, MODE_COMPLEX_INT)
    {
      rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  FOR_EACH_MODE_IN_CLASS (mode, MODE_COMPLEX_FLOAT)
    {
      rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_BOOL)
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
      if (GET_MODE_INNER (mode) == BImode)
        /* As for BImode, "all 1" and "all -1" are unsigned and signed
           interpretations of the same value.  */
        const_tiny_rtx[1][(int) mode] = const_tiny_rtx[3][(int) mode];
      else
        const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_INT)
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
      const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
    }

  FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_FLOAT)
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }
  FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_FRACT)
    {
      scalar_mode smode = smode_iter.require ();
      FCONST0 (smode).data.high = 0;
      FCONST0 (smode).data.low = 0;
      FCONST0 (smode).mode = smode;
      const_tiny_rtx[0][(int) smode]
        = CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);
    }

  FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_UFRACT)
    {
      scalar_mode smode = smode_iter.require ();
      FCONST0 (smode).data.high = 0;
      FCONST0 (smode).data.low = 0;
      FCONST0 (smode).mode = smode;
      const_tiny_rtx[0][(int) smode]
        = CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);
    }

  FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_ACCUM)
    {
      scalar_mode smode = smode_iter.require ();
      FCONST0 (smode).data.high = 0;
      FCONST0 (smode).data.low = 0;
      FCONST0 (smode).mode = smode;
      const_tiny_rtx[0][(int) smode]
        = CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);

      /* We store the value 1.  */
      FCONST1 (smode).data.high = 0;
      FCONST1 (smode).data.low = 0;
      FCONST1 (smode).mode = smode;
      FCONST1 (smode).data
        = double_int_one.lshift (GET_MODE_FBIT (smode),
                                 HOST_BITS_PER_DOUBLE_INT,
                                 SIGNED_FIXED_POINT_MODE_P (smode));
      const_tiny_rtx[1][(int) smode]
        = CONST_FIXED_FROM_FIXED_VALUE (FCONST1 (smode), smode);
    }

  FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_UACCUM)
    {
      scalar_mode smode = smode_iter.require ();
      FCONST0 (smode).data.high = 0;
      FCONST0 (smode).data.low = 0;
      FCONST0 (smode).mode = smode;
      const_tiny_rtx[0][(int) smode]
        = CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);

      /* We store the value 1.  */
      FCONST1 (smode).data.high = 0;
      FCONST1 (smode).data.low = 0;
      FCONST1 (smode).mode = smode;
      FCONST1 (smode).data
        = double_int_one.lshift (GET_MODE_FBIT (smode),
                                 HOST_BITS_PER_DOUBLE_INT,
                                 SIGNED_FIXED_POINT_MODE_P (smode));
      const_tiny_rtx[1][(int) smode]
        = CONST_FIXED_FROM_FIXED_VALUE (FCONST1 (smode), smode);
    }
  FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_FRACT)
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
    }

  FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_UFRACT)
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
    }

  FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_ACCUM)
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_UACCUM)
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
    if (GET_MODE_CLASS ((machine_mode) i) == MODE_CC)
      const_tiny_rtx[0][i] = const0_rtx;
  pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
  ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
  simple_return_rtx = gen_rtx_fmt_ (SIMPLE_RETURN, VOIDmode);
  invalid_insn_rtx = gen_rtx_INSN (VOIDmode,
                                   /*prev_insn=*/NULL,
                                   /*next_insn=*/NULL,
                                   /*bb=*/NULL,
                                   /*pattern=*/NULL_RTX,
                                   /*location=*/-1,
                                   CODE_FOR_nothing,
                                   /*reg_notes=*/NULL_RTX);
}
/* Produce an exact duplicate of insn INSN after AFTER.
   Take care to update libcall regions if present.  */

rtx_insn *
emit_copy_of_insn_after (rtx_insn *insn, rtx_insn *after)
{
  rtx_insn *new_rtx;
  rtx link;

  switch (GET_CODE (insn))
    {
    case INSN:
      new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case JUMP_INSN:
      new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
      CROSSING_JUMP_P (new_rtx) = CROSSING_JUMP_P (insn);
      break;

    case DEBUG_INSN:
      new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case CALL_INSN:
      new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
      if (CALL_INSN_FUNCTION_USAGE (insn))
        CALL_INSN_FUNCTION_USAGE (new_rtx)
          = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
      SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
      RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
      RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
      RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
        = RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
      break;

    default:
      gcc_unreachable ();
    }

  /* Update LABEL_NUSES.  */
  if (NONDEBUG_INSN_P (insn))
    mark_jump_label (PATTERN (new_rtx), new_rtx, 0);

  INSN_LOCATION (new_rtx) = INSN_LOCATION (insn);

  /* If the old insn is frame related, then so is the new one.  This is
     primarily needed for IA-64 unwind info which marks epilogue insns,
     which may be duplicated by the basic block reordering code.  */
  RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);

  /* Locate the end of existing REG_NOTES in NEW_RTX.  */
  rtx *ptail = &REG_NOTES (new_rtx);
  while (*ptail != NULL_RTX)
    ptail = &XEXP (*ptail, 1);

  /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
     will make them.  REG_LABEL_TARGETs are created there too, but are
     supposed to be sticky, so we copy them.  */
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
      {
        *ptail = duplicate_reg_note (link);
        ptail = &XEXP (*ptail, 1);
      }

  INSN_CODE (new_rtx) = INSN_CODE (insn);
  return new_rtx;
}
static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
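
/* Return a (clobber (reg)) expression for hard register REGNO in MODE,
   creating and caching it in hard_reg_clobbers on first use so that all
   requests for the same mode/register pair share one rtx.  */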
rtx
gen_hard_reg_clobber (machine_mode mode, unsigned int regno)
{
  if (hard_reg_clobbers[mode][regno])
    return hard_reg_clobbers[mode][regno];
  else
    return (hard_reg_clobbers[mode][regno] =
            gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
}
location_t prologue_location;
location_t epilogue_location;

/* Hold the current and last location information, so that the location
   data structures are built lazily, only when insns at a given place are
   actually needed.  */
static location_t curr_location;
/* Allocate the insn location data structure.  */
void
insn_locations_init (void)
{
  prologue_location = epilogue_location = 0;
  curr_location = UNKNOWN_LOCATION;
}

/* At the end of the emit stage, clear the current location.  */
void
insn_locations_finalize (void)
{
  epilogue_location = curr_location;
  curr_location = UNKNOWN_LOCATION;
}

/* Set the current location.  */
void
set_curr_insn_location (location_t location)
{
  curr_location = location;
}

/* Get the current location.  */
location_t
curr_insn_location (void)
{
  return curr_location;
}

/* Set the location of the insn chain starting at INSN to LOC.  */
void
set_insn_locations (rtx_insn *insn, location_t loc)
{
  while (insn)
    {
      if (INSN_P (insn))
        INSN_LOCATION (insn) = loc;
      insn = NEXT_INSN (insn);
    }
}

/* Return the lexical scope block INSN belongs to.  */
tree
insn_scope (const rtx_insn *insn)
{
  return LOCATION_BLOCK (INSN_LOCATION (insn));
}
/* Return the line number of the statement that produced this insn.  */
int
insn_line (const rtx_insn *insn)
{
  return LOCATION_LINE (INSN_LOCATION (insn));
}
/* Return the source file of the statement that produced this insn.  */
const char *
insn_file (const rtx_insn *insn)
{
  return LOCATION_FILE (INSN_LOCATION (insn));
}

/* Return the expanded location of the statement that produced this insn.  */
expanded_location
insn_location (const rtx_insn *insn)
{
  return expand_location (INSN_LOCATION (insn));
}
/* Return true if memory model MODEL requires a pre-operation (release-style)
   barrier or a post-operation (acquire-style) barrier.  While not universal,
   this function matches behavior of several targets.  */

bool
need_atomic_barrier_p (enum memmodel model, bool pre)
{
  switch (model & MEMMODEL_BASE_MASK)
    {
    case MEMMODEL_RELAXED:
    case MEMMODEL_CONSUME:
      return false;
    case MEMMODEL_RELEASE:
      return pre;
    case MEMMODEL_ACQUIRE:
      return !pre;
    case MEMMODEL_ACQ_REL:
    case MEMMODEL_SEQ_CST:
      return true;
    default:
      gcc_unreachable ();
    }
}
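
/* A typical use in a target's atomic expanders looks roughly like this
   (an illustrative sketch only; the "memory_barrier" pattern name and the
   exact sequence are target-specific):

     if (need_atomic_barrier_p (model, true))
       emit_insn (gen_memory_barrier ());
     emit_move_insn (target, mem);
     if (need_atomic_barrier_p (model, false))
       emit_insn (gen_memory_barrier ());  */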
/* Return a constant shift amount for shifting a value of mode MODE
   by VALUE bits.  */

rtx
gen_int_shift_amount (machine_mode, poly_int64 value)
{
  /* Use a 64-bit mode, to avoid any truncation.

     ??? Perhaps this should be automatically derived from the .md files
     instead, or perhaps have a target hook.  */
  scalar_int_mode shift_mode = (BITS_PER_UNIT == 8
                                ? DImode
                                : int_mode_for_size (64, 0).require ());
  return gen_int_mode (value, shift_mode);
}
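
/* For example, gen_int_shift_amount (QImode, 5) simply yields (const_int 5);
   the (unnamed) mode argument is not currently used, and the value is
   interpreted in a 64-bit integer mode so that no shift count is truncated.  */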
/* Initialize fields of rtl_data related to stack alignment.  */

void
rtl_data::init_stack_alignment ()
{
  stack_alignment_needed = STACK_BOUNDARY;
  max_used_stack_slot_alignment = STACK_BOUNDARY;
  stack_alignment_estimated = 0;
  preferred_stack_boundary = STACK_BOUNDARY;
}


#include "gt-emit-rtl.h"