[official-gcc.git] / gcc / emit-rtl.cc
1 /* Emit RTL for the GCC expander.
2 Copyright (C) 1987-2023 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
21 /* Middle-to-low level generation of rtx code and insns.
23 This file contains support functions for creating rtl expressions
24 and manipulating them in the doubly-linked chain of insns.
26 The patterns of the insns are created by machine-dependent
27 routines in insn-emit.cc, which is generated automatically from
28 the machine description. These routines make the individual rtx's
29 of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
30 which are automatically generated from rtl.def; what is machine
31 dependent is the kind of rtx's they make and what arguments they
32 use. */
34 #include "config.h"
35 #include "system.h"
36 #include "coretypes.h"
37 #include "memmodel.h"
38 #include "backend.h"
39 #include "target.h"
40 #include "rtl.h"
41 #include "tree.h"
42 #include "df.h"
43 #include "tm_p.h"
44 #include "stringpool.h"
45 #include "insn-config.h"
46 #include "regs.h"
47 #include "emit-rtl.h"
48 #include "recog.h"
49 #include "diagnostic-core.h"
50 #include "alias.h"
51 #include "fold-const.h"
52 #include "varasm.h"
53 #include "cfgrtl.h"
54 #include "tree-eh.h"
55 #include "explow.h"
56 #include "expr.h"
57 #include "builtins.h"
58 #include "rtl-iter.h"
59 #include "stor-layout.h"
60 #include "opts.h"
61 #include "predict.h"
62 #include "rtx-vector-builder.h"
63 #include "gimple.h"
64 #include "gimple-ssa.h"
65 #include "gimplify.h"
67 struct target_rtl default_target_rtl;
68 #if SWITCHABLE_TARGET
69 struct target_rtl *this_target_rtl = &default_target_rtl;
70 #endif
72 #define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)
74 /* Commonly used modes. */
76 scalar_int_mode byte_mode; /* Mode whose width is BITS_PER_UNIT. */
77 scalar_int_mode word_mode; /* Mode whose width is BITS_PER_WORD. */
78 scalar_int_mode ptr_mode; /* Mode whose width is POINTER_SIZE. */
80 /* Datastructures maintained for currently processed function in RTL form. */
82 struct rtl_data x_rtl;
84 /* Indexed by pseudo register number, gives the rtx for that pseudo.
85 Allocated in parallel with regno_pointer_align.
86 FIXME: We could put it into the emit_status struct, but gengtype is not able to deal
87 with a length attribute nested in top-level structures. */
89 rtx * regno_reg_rtx;
91 /* This is *not* reset after each function. It gives each CODE_LABEL
92 in the entire compilation a unique label number. */
94 static GTY(()) int label_num = 1;
96 /* We record floating-point CONST_DOUBLEs in each floating-point mode for
97 the values of 0, 1, and 2. For the integer entries and VOIDmode, we
98 record a copy of const[012]_rtx and constm1_rtx. CONSTM1_RTX
99 is set only for MODE_INT and MODE_VECTOR_INT modes. */
101 rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE];
103 rtx const_true_rtx;
105 REAL_VALUE_TYPE dconst0;
106 REAL_VALUE_TYPE dconst1;
107 REAL_VALUE_TYPE dconst2;
108 REAL_VALUE_TYPE dconstm0;
109 REAL_VALUE_TYPE dconstm1;
110 REAL_VALUE_TYPE dconsthalf;
111 REAL_VALUE_TYPE dconstinf;
112 REAL_VALUE_TYPE dconstninf;
114 /* Record fixed-point constant 0 and 1. */
115 FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
116 FIXED_VALUE_TYPE fconst1[MAX_FCONST1];
118 /* We make one copy of (const_int C) where C is in
119 [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
120 to save space during the compilation and simplify comparisons of
121 integers. */
123 rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
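/* A minimal usage sketch of the sharing this provides:

     rtx a = GEN_INT (5);
     rtx b = gen_rtx_CONST_INT (VOIDmode, 5);
     gcc_assert (a == b && GEN_INT (0) == const0_rtx);

   so small CONST_INTs can be compared with simple pointer equality. */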
125 /* Standard pieces of rtx, to be substituted directly into things. */
126 rtx pc_rtx;
127 rtx ret_rtx;
128 rtx simple_return_rtx;
130 /* Marker used for denoting an INSN, which should never be accessed (i.e.,
131 this pointer should normally never be dereferenced), but is required to be
132 distinct from NULL_RTX. Currently used by peephole2 pass. */
133 rtx_insn *invalid_insn_rtx;
135 /* A hash table storing CONST_INTs whose absolute value is greater
136 than MAX_SAVED_CONST_INT. */
138 struct const_int_hasher : ggc_cache_ptr_hash<rtx_def>
140 typedef HOST_WIDE_INT compare_type;
142 static hashval_t hash (rtx i);
143 static bool equal (rtx i, HOST_WIDE_INT h);
146 static GTY ((cache)) hash_table<const_int_hasher> *const_int_htab;
148 struct const_wide_int_hasher : ggc_cache_ptr_hash<rtx_def>
150 static hashval_t hash (rtx x);
151 static bool equal (rtx x, rtx y);
154 static GTY ((cache)) hash_table<const_wide_int_hasher> *const_wide_int_htab;
156 struct const_poly_int_hasher : ggc_cache_ptr_hash<rtx_def>
158 typedef std::pair<machine_mode, poly_wide_int_ref> compare_type;
160 static hashval_t hash (rtx x);
161 static bool equal (rtx x, const compare_type &y);
164 static GTY ((cache)) hash_table<const_poly_int_hasher> *const_poly_int_htab;
166 /* A hash table storing register attribute structures. */
167 struct reg_attr_hasher : ggc_cache_ptr_hash<reg_attrs>
169 static hashval_t hash (reg_attrs *x);
170 static bool equal (reg_attrs *a, reg_attrs *b);
173 static GTY ((cache)) hash_table<reg_attr_hasher> *reg_attrs_htab;
175 /* A hash table storing all CONST_DOUBLEs. */
176 struct const_double_hasher : ggc_cache_ptr_hash<rtx_def>
178 static hashval_t hash (rtx x);
179 static bool equal (rtx x, rtx y);
182 static GTY ((cache)) hash_table<const_double_hasher> *const_double_htab;
184 /* A hash table storing all CONST_FIXEDs. */
185 struct const_fixed_hasher : ggc_cache_ptr_hash<rtx_def>
187 static hashval_t hash (rtx x);
188 static bool equal (rtx x, rtx y);
191 static GTY ((cache)) hash_table<const_fixed_hasher> *const_fixed_htab;
193 #define cur_insn_uid (crtl->emit.x_cur_insn_uid)
194 #define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
195 #define first_label_num (crtl->emit.x_first_label_num)
197 static void set_used_decls (tree);
198 static void mark_label_nuses (rtx);
199 #if TARGET_SUPPORTS_WIDE_INT
200 static rtx lookup_const_wide_int (rtx);
201 #endif
202 static rtx lookup_const_double (rtx);
203 static rtx lookup_const_fixed (rtx);
204 static rtx gen_const_vector (machine_mode, int);
205 static void copy_rtx_if_shared_1 (rtx *orig);
207 /* Probability of the conditional branch currently processed by try_split. */
208 profile_probability split_branch_probability;
210 /* Returns a hash code for X (which is really a CONST_INT). */
212 hashval_t
213 const_int_hasher::hash (rtx x)
215 return (hashval_t) INTVAL (x);
218 /* Returns nonzero if the value represented by X (which is really a
219 CONST_INT) is the same as that given by Y (which is really a
220 HOST_WIDE_INT *). */
222 bool
223 const_int_hasher::equal (rtx x, HOST_WIDE_INT y)
225 return (INTVAL (x) == y);
228 #if TARGET_SUPPORTS_WIDE_INT
229 /* Returns a hash code for X (which is really a CONST_WIDE_INT). */
231 hashval_t
232 const_wide_int_hasher::hash (rtx x)
234 int i;
235 unsigned HOST_WIDE_INT hash = 0;
236 const_rtx xr = x;
238 for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
239 hash += CONST_WIDE_INT_ELT (xr, i);
241 return (hashval_t) hash;
244 /* Returns nonzero if the value represented by X (which is really a
245 CONST_WIDE_INT) is the same as that given by Y (which is really a
246 CONST_WIDE_INT). */
248 bool
249 const_wide_int_hasher::equal (rtx x, rtx y)
251 int i;
252 const_rtx xr = x;
253 const_rtx yr = y;
254 if (CONST_WIDE_INT_NUNITS (xr) != CONST_WIDE_INT_NUNITS (yr))
255 return false;
257 for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
258 if (CONST_WIDE_INT_ELT (xr, i) != CONST_WIDE_INT_ELT (yr, i))
259 return false;
261 return true;
263 #endif
265 /* Returns a hash code for CONST_POLY_INT X. */
267 hashval_t
268 const_poly_int_hasher::hash (rtx x)
270 inchash::hash h;
271 h.add_int (GET_MODE (x));
272 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
273 h.add_wide_int (CONST_POLY_INT_COEFFS (x)[i]);
274 return h.end ();
277 /* Returns nonzero if CONST_POLY_INT X is an rtx representation of Y. */
279 bool
280 const_poly_int_hasher::equal (rtx x, const compare_type &y)
282 if (GET_MODE (x) != y.first)
283 return false;
284 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
285 if (CONST_POLY_INT_COEFFS (x)[i] != y.second.coeffs[i])
286 return false;
287 return true;
290 /* Returns a hash code for X (which is really a CONST_DOUBLE). */
291 hashval_t
292 const_double_hasher::hash (rtx x)
294 const_rtx const value = x;
295 hashval_t h;
297 if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (value) == VOIDmode)
298 h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
299 else
301 h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
302 /* MODE is used in the comparison, so it should be in the hash. */
303 h ^= GET_MODE (value);
305 return h;
308 /* Returns nonzero if the value represented by X (really a CONST_DOUBLE)
309 is the same as that represented by Y (really a CONST_DOUBLE). */
310 bool
311 const_double_hasher::equal (rtx x, rtx y)
313 const_rtx const a = x, b = y;
315 if (GET_MODE (a) != GET_MODE (b))
316 return 0;
317 if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (a) == VOIDmode)
318 return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
319 && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
320 else
321 return real_identical (CONST_DOUBLE_REAL_VALUE (a),
322 CONST_DOUBLE_REAL_VALUE (b));
325 /* Returns a hash code for X (which is really a CONST_FIXED). */
327 hashval_t
328 const_fixed_hasher::hash (rtx x)
330 const_rtx const value = x;
331 hashval_t h;
333 h = fixed_hash (CONST_FIXED_VALUE (value));
334 /* MODE is used in the comparison, so it should be in the hash. */
335 h ^= GET_MODE (value);
336 return h;
339 /* Returns nonzero if the value represented by X is the same as that
340 represented by Y. */
342 bool
343 const_fixed_hasher::equal (rtx x, rtx y)
345 const_rtx const a = x, b = y;
347 if (GET_MODE (a) != GET_MODE (b))
348 return 0;
349 return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
352 /* Return true if the given memory attributes are equal. */
354 bool
355 mem_attrs_eq_p (const class mem_attrs *p, const class mem_attrs *q)
357 if (p == q)
358 return true;
359 if (!p || !q)
360 return false;
361 return (p->alias == q->alias
362 && p->offset_known_p == q->offset_known_p
363 && (!p->offset_known_p || known_eq (p->offset, q->offset))
364 && p->size_known_p == q->size_known_p
365 && (!p->size_known_p || known_eq (p->size, q->size))
366 && p->align == q->align
367 && p->addrspace == q->addrspace
368 && (p->expr == q->expr
369 || (p->expr != NULL_TREE && q->expr != NULL_TREE
370 && operand_equal_p (p->expr, q->expr, 0))));
373 /* Set MEM's memory attributes so that they are the same as ATTRS. */
375 static void
376 set_mem_attrs (rtx mem, mem_attrs *attrs)
378 /* If everything is the default, we can just clear the attributes. */
379 if (mem_attrs_eq_p (attrs, mode_mem_attrs[(int) GET_MODE (mem)]))
381 MEM_ATTRS (mem) = 0;
382 return;
385 if (!MEM_ATTRS (mem)
386 || !mem_attrs_eq_p (attrs, MEM_ATTRS (mem)))
388 MEM_ATTRS (mem) = ggc_alloc<mem_attrs> ();
389 memcpy (MEM_ATTRS (mem), attrs, sizeof (mem_attrs));
393 /* Returns a hash code for X (which is really a reg_attrs *). */
395 hashval_t
396 reg_attr_hasher::hash (reg_attrs *x)
398 const reg_attrs *const p = x;
400 inchash::hash h;
401 h.add_ptr (p->decl);
402 h.add_poly_hwi (p->offset);
403 return h.end ();
406 /* Returns nonzero if the value represented by X is the same as that given by
407 Y. */
409 bool
410 reg_attr_hasher::equal (reg_attrs *x, reg_attrs *y)
412 const reg_attrs *const p = x;
413 const reg_attrs *const q = y;
415 return (p->decl == q->decl && known_eq (p->offset, q->offset));
417 /* Allocate a new reg_attrs structure and insert it into the hash table if
418 one identical to it is not already in the table. We are doing this for
419 MEM of mode MODE. */
421 static reg_attrs *
422 get_reg_attrs (tree decl, poly_int64 offset)
424 reg_attrs attrs;
426 /* If everything is the default, we can just return zero. */
427 if (decl == 0 && known_eq (offset, 0))
428 return 0;
430 attrs.decl = decl;
431 attrs.offset = offset;
433 reg_attrs **slot = reg_attrs_htab->find_slot (&attrs, INSERT);
434 if (*slot == 0)
436 *slot = ggc_alloc<reg_attrs> ();
437 memcpy (*slot, &attrs, sizeof (reg_attrs));
440 return *slot;
444 #if !HAVE_blockage
445 /* Generate an empty ASM_INPUT, which is used to block attempts to schedule,
446 and to prevent register equivalences from being seen across this insn. */
449 gen_blockage (void)
451 rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
452 MEM_VOLATILE_P (x) = true;
453 return x;
455 #endif
458 /* Set the mode and register number of X to MODE and REGNO. */
460 void
461 set_mode_and_regno (rtx x, machine_mode mode, unsigned int regno)
463 unsigned int nregs = (HARD_REGISTER_NUM_P (regno)
464 ? hard_regno_nregs (regno, mode)
465 : 1);
466 PUT_MODE_RAW (x, mode);
467 set_regno_raw (x, regno, nregs);
470 /* Initialize a fresh REG rtx with mode MODE and register REGNO. */
473 init_raw_REG (rtx x, machine_mode mode, unsigned int regno)
475 set_mode_and_regno (x, mode, regno);
476 REG_ATTRS (x) = NULL;
477 ORIGINAL_REGNO (x) = regno;
478 return x;
481 /* Generate a new REG rtx. Make sure ORIGINAL_REGNO is set properly, and
482 don't attempt to share with the various global pieces of rtl (such as
483 frame_pointer_rtx). */
486 gen_raw_REG (machine_mode mode, unsigned int regno)
488 rtx x = rtx_alloc (REG MEM_STAT_INFO);
489 init_raw_REG (x, mode, regno);
490 return x;
493 /* There are some RTL codes that require special attention; the generation
494 functions do the raw handling. If you add to this list, modify
495 special_rtx in gengenrtl.cc as well. */
497 rtx_expr_list *
498 gen_rtx_EXPR_LIST (machine_mode mode, rtx expr, rtx expr_list)
500 return as_a <rtx_expr_list *> (gen_rtx_fmt_ee (EXPR_LIST, mode, expr,
501 expr_list));
504 rtx_insn_list *
505 gen_rtx_INSN_LIST (machine_mode mode, rtx insn, rtx insn_list)
507 return as_a <rtx_insn_list *> (gen_rtx_fmt_ue (INSN_LIST, mode, insn,
508 insn_list));
511 rtx_insn *
512 gen_rtx_INSN (machine_mode mode, rtx_insn *prev_insn, rtx_insn *next_insn,
513 basic_block bb, rtx pattern, int location, int code,
514 rtx reg_notes)
516 return as_a <rtx_insn *> (gen_rtx_fmt_uuBeiie (INSN, mode,
517 prev_insn, next_insn,
518 bb, pattern, location, code,
519 reg_notes));
523 gen_rtx_CONST_INT (machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
525 if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
526 return const_int_rtx[arg + MAX_SAVED_CONST_INT];
528 #if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
529 if (const_true_rtx && arg == STORE_FLAG_VALUE)
530 return const_true_rtx;
531 #endif
533 /* Look up the CONST_INT in the hash table. */
534 rtx *slot = const_int_htab->find_slot_with_hash (arg, (hashval_t) arg,
535 INSERT);
536 if (*slot == 0)
537 *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);
539 return *slot;
543 gen_int_mode (poly_int64 c, machine_mode mode)
545 c = trunc_int_for_mode (c, mode);
546 if (c.is_constant ())
547 return GEN_INT (c.coeffs[0]);
548 unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode));
549 return immed_wide_int_const (poly_wide_int::from (c, prec, SIGNED), mode);
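/* Illustrative example: gen_int_mode (255, QImode) first truncates the value
   for QImode and therefore returns constm1_rtx, the canonical CONST_INT for
   an all-ones QImode value, whereas a bare GEN_INT (255) would not be
   canonical in QImode. */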
552 /* CONST_DOUBLEs might be created from pairs of integers, or from
553 REAL_VALUE_TYPEs. Also, their length is known only at run time,
554 so we cannot use gen_rtx_raw_CONST_DOUBLE. */
556 /* Determine whether REAL, a CONST_DOUBLE, already exists in the
557 hash table. If so, return its counterpart; otherwise add it
558 to the hash table and return it. */
559 static rtx
560 lookup_const_double (rtx real)
562 rtx *slot = const_double_htab->find_slot (real, INSERT);
563 if (*slot == 0)
564 *slot = real;
566 return *slot;
569 /* Return a CONST_DOUBLE rtx for a floating-point value specified by
570 VALUE in mode MODE. */
572 const_double_from_real_value (REAL_VALUE_TYPE value, machine_mode mode)
574 rtx real = rtx_alloc (CONST_DOUBLE);
575 PUT_MODE (real, mode);
577 real->u.rv = value;
579 return lookup_const_double (real);
582 /* Determine whether FIXED, a CONST_FIXED, already exists in the
583 hash table. If so, return its counterpart; otherwise add it
584 to the hash table and return it. */
586 static rtx
587 lookup_const_fixed (rtx fixed)
589 rtx *slot = const_fixed_htab->find_slot (fixed, INSERT);
590 if (*slot == 0)
591 *slot = fixed;
593 return *slot;
596 /* Return a CONST_FIXED rtx for a fixed-point value specified by
597 VALUE in mode MODE. */
600 const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, machine_mode mode)
602 rtx fixed = rtx_alloc (CONST_FIXED);
603 PUT_MODE (fixed, mode);
605 fixed->u.fv = value;
607 return lookup_const_fixed (fixed);
610 #if TARGET_SUPPORTS_WIDE_INT == 0
611 /* Constructs double_int from rtx CST. */
613 double_int
614 rtx_to_double_int (const_rtx cst)
616 double_int r;
618 if (CONST_INT_P (cst))
619 r = double_int::from_shwi (INTVAL (cst));
620 else if (CONST_DOUBLE_AS_INT_P (cst))
622 r.low = CONST_DOUBLE_LOW (cst);
623 r.high = CONST_DOUBLE_HIGH (cst);
625 else
626 gcc_unreachable ();
628 return r;
630 #endif
632 #if TARGET_SUPPORTS_WIDE_INT
633 /* Determine whether CONST_WIDE_INT WINT already exists in the hash table.
634 If so, return its counterpart; otherwise add it to the hash table and
635 return it. */
637 static rtx
638 lookup_const_wide_int (rtx wint)
640 rtx *slot = const_wide_int_htab->find_slot (wint, INSERT);
641 if (*slot == 0)
642 *slot = wint;
644 return *slot;
646 #endif
648 /* Return an rtx constant for V, given that the constant has mode MODE.
649 The returned rtx will be a CONST_INT if V fits, otherwise it will be
650 a CONST_DOUBLE (if !TARGET_SUPPORTS_WIDE_INT) or a CONST_WIDE_INT
651 (if TARGET_SUPPORTS_WIDE_INT). */
653 static rtx
654 immed_wide_int_const_1 (const wide_int_ref &v, machine_mode mode)
656 unsigned int len = v.get_len ();
657 /* Not scalar_int_mode because we also allow pointer bound modes. */
658 unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode));
660 /* Allow truncation but not extension since we do not know if the
661 number is signed or unsigned. */
662 gcc_assert (prec <= v.get_precision ());
664 if (len < 2 || prec <= HOST_BITS_PER_WIDE_INT)
665 return gen_int_mode (v.elt (0), mode);
667 #if TARGET_SUPPORTS_WIDE_INT
669 unsigned int i;
670 rtx value;
671 unsigned int blocks_needed
672 = (prec + HOST_BITS_PER_WIDE_INT - 1) / HOST_BITS_PER_WIDE_INT;
674 if (len > blocks_needed)
675 len = blocks_needed;
677 value = const_wide_int_alloc (len);
679 /* It is so tempting to just put the mode in here. Must control
680 myself ... */
681 PUT_MODE (value, VOIDmode);
682 CWI_PUT_NUM_ELEM (value, len);
684 for (i = 0; i < len; i++)
685 CONST_WIDE_INT_ELT (value, i) = v.elt (i);
687 return lookup_const_wide_int (value);
689 #else
690 return immed_double_const (v.elt (0), v.elt (1), mode);
691 #endif
694 #if TARGET_SUPPORTS_WIDE_INT == 0
695 /* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
696 of ints: I0 is the low-order word and I1 is the high-order word.
697 For values that are larger than HOST_BITS_PER_DOUBLE_INT, the
698 implied upper bits are copies of the high bit of i1. The value
699 itself is neither signed nor unsigned. Do not use this routine for
700 non-integer modes; convert to REAL_VALUE_TYPE and use
701 const_double_from_real_value. */
704 immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, machine_mode mode)
706 rtx value;
707 unsigned int i;
709 /* There are the following cases (note that there are no modes with
710 HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < HOST_BITS_PER_DOUBLE_INT):
712 1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
713 gen_int_mode.
714 2) If the value of the integer fits into HOST_WIDE_INT anyway
715 (i.e., i1 consists only from copies of the sign bit, and sign
716 of i0 and i1 are the same), then we return a CONST_INT for i0.
717 3) Otherwise, we create a CONST_DOUBLE for i0 and i1. */
718 scalar_mode smode;
719 if (is_a <scalar_mode> (mode, &smode)
720 && GET_MODE_BITSIZE (smode) <= HOST_BITS_PER_WIDE_INT)
721 return gen_int_mode (i0, mode);
723 /* If this integer fits in one word, return a CONST_INT. */
724 if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
725 return GEN_INT (i0);
727 /* We use VOIDmode for integers. */
728 value = rtx_alloc (CONST_DOUBLE);
729 PUT_MODE (value, VOIDmode);
731 CONST_DOUBLE_LOW (value) = i0;
732 CONST_DOUBLE_HIGH (value) = i1;
734 for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
735 XWINT (value, i) = 0;
737 return lookup_const_double (value);
739 #endif
741 /* Return an rtx representation of C in mode MODE. */
744 immed_wide_int_const (const poly_wide_int_ref &c, machine_mode mode)
746 if (c.is_constant ())
747 return immed_wide_int_const_1 (c.coeffs[0], mode);
749 /* Not scalar_int_mode because we also allow pointer bound modes. */
750 unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode));
752 /* Allow truncation but not extension since we do not know if the
753 number is signed or unsigned. */
754 gcc_assert (prec <= c.coeffs[0].get_precision ());
755 poly_wide_int newc = poly_wide_int::from (c, prec, SIGNED);
757 /* See whether we already have an rtx for this constant. */
758 inchash::hash h;
759 h.add_int (mode);
760 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
761 h.add_wide_int (newc.coeffs[i]);
762 const_poly_int_hasher::compare_type typed_value (mode, newc);
763 rtx *slot = const_poly_int_htab->find_slot_with_hash (typed_value,
764 h.end (), INSERT);
765 rtx x = *slot;
766 if (x)
767 return x;
769 /* Create a new rtx. There's a choice to be made here between installing
770 the actual mode of the rtx or leaving it as VOIDmode (for consistency
771 with CONST_INT). In practice the handling of the codes is different
772 enough that we get no benefit from using VOIDmode, and various places
773 assume that VOIDmode implies CONST_INT. Using the real mode seems like
774 the right long-term direction anyway. */
775 typedef trailing_wide_ints<NUM_POLY_INT_COEFFS> twi;
776 size_t extra_size = twi::extra_size (prec);
777 x = rtx_alloc_v (CONST_POLY_INT,
778 sizeof (struct const_poly_int_def) + extra_size);
779 PUT_MODE (x, mode);
780 CONST_POLY_INT_COEFFS (x).set_precision (prec);
781 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
782 CONST_POLY_INT_COEFFS (x)[i] = newc.coeffs[i];
784 *slot = x;
785 return x;
789 gen_rtx_REG (machine_mode mode, unsigned int regno)
791 /* In case the MD file explicitly references the frame pointer, have
792 all such references point to the same frame pointer. This is
793 used during frame pointer elimination to distinguish the explicit
794 references to these registers from pseudos that happened to be
795 assigned to them.
797 If we have eliminated the frame pointer or arg pointer, we will
798 be using it as a normal register, for example as a spill
799 register. In such cases, we might be accessing it in a mode that
800 is not Pmode and therefore cannot use the pre-allocated rtx.
802 Also don't do this when we are making new REGs in reload, since
803 we don't want to get confused with the real pointers. */
805 if (mode == Pmode && !reload_in_progress && !lra_in_progress)
807 if (regno == FRAME_POINTER_REGNUM
808 && (!reload_completed || frame_pointer_needed))
809 return frame_pointer_rtx;
811 if (!HARD_FRAME_POINTER_IS_FRAME_POINTER
812 && regno == HARD_FRAME_POINTER_REGNUM
813 && (!reload_completed || frame_pointer_needed))
814 return hard_frame_pointer_rtx;
815 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
816 if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
817 && regno == ARG_POINTER_REGNUM)
818 return arg_pointer_rtx;
819 #endif
820 #ifdef RETURN_ADDRESS_POINTER_REGNUM
821 if (regno == RETURN_ADDRESS_POINTER_REGNUM)
822 return return_address_pointer_rtx;
823 #endif
824 if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
825 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
826 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
827 return pic_offset_table_rtx;
828 if (regno == STACK_POINTER_REGNUM)
829 return stack_pointer_rtx;
832 #if 0
833 /* If the per-function register table has been set up, try to re-use
834 an existing entry in that table to avoid useless generation of RTL.
836 This code is disabled for now until we can fix the various backends
837 which depend on having non-shared hard registers in some cases. Long
838 term we want to re-enable this code as it can significantly cut down
839 on the amount of useless RTL that gets generated.
841 We'll also need to fix some code that runs after reload that wants to
842 set ORIGINAL_REGNO. */
844 if (cfun
845 && cfun->emit
846 && regno_reg_rtx
847 && regno < FIRST_PSEUDO_REGISTER
848 && reg_raw_mode[regno] == mode)
849 return regno_reg_rtx[regno];
850 #endif
852 return gen_raw_REG (mode, regno);
856 gen_rtx_MEM (machine_mode mode, rtx addr)
858 rtx rt = gen_rtx_raw_MEM (mode, addr);
860 /* This field is not cleared by the mere allocation of the rtx, so
861 we clear it here. */
862 MEM_ATTRS (rt) = 0;
864 return rt;
867 /* Generate a memory referring to non-trapping constant memory. */
870 gen_const_mem (machine_mode mode, rtx addr)
872 rtx mem = gen_rtx_MEM (mode, addr);
873 MEM_READONLY_P (mem) = 1;
874 MEM_NOTRAP_P (mem) = 1;
875 return mem;
878 /* Generate a MEM referring to fixed portions of the frame, e.g., register
879 save areas. */
882 gen_frame_mem (machine_mode mode, rtx addr)
884 rtx mem = gen_rtx_MEM (mode, addr);
885 MEM_NOTRAP_P (mem) = 1;
886 set_mem_alias_set (mem, get_frame_alias_set ());
887 return mem;
890 /* Generate a MEM referring to a temporary use of the stack, not part
891 of the fixed stack frame. For example, something which is pushed
892 by a target splitter. */
894 gen_tmp_stack_mem (machine_mode mode, rtx addr)
896 rtx mem = gen_rtx_MEM (mode, addr);
897 MEM_NOTRAP_P (mem) = 1;
898 if (!cfun->calls_alloca)
899 set_mem_alias_set (mem, get_frame_alias_set ());
900 return mem;
903 /* We want to create (subreg:OMODE (obj:IMODE) OFFSET). Return true if
904 this construct would be valid, and false otherwise. */
906 bool
907 validate_subreg (machine_mode omode, machine_mode imode,
908 const_rtx reg, poly_uint64 offset)
910 poly_uint64 isize = GET_MODE_SIZE (imode);
911 poly_uint64 osize = GET_MODE_SIZE (omode);
913 /* The sizes must be ordered, so that we know whether the subreg
914 is partial, paradoxical or complete. */
915 if (!ordered_p (isize, osize))
916 return false;
918 /* All subregs must be aligned. */
919 if (!multiple_p (offset, osize))
920 return false;
922 /* The subreg offset cannot be outside the inner object. */
923 if (maybe_ge (offset, isize))
924 return false;
926 poly_uint64 regsize = REGMODE_NATURAL_SIZE (imode);
928 /* ??? This should not be here. Temporarily continue to allow word_mode
929 subregs of anything. The most common offender is (subreg:SI (reg:DF)).
930 Generally, backends are doing something sketchy but it'll take time to
931 fix them all. */
932 if (omode == word_mode)
934 /* ??? Similarly, e.g. with (subreg:DF (reg:TI)). Though store_bit_field
935 is the culprit here, and not the backends. */
936 else if (known_ge (osize, regsize) && known_ge (isize, osize))
938 /* Allow component subregs of complex and vector. Though given the below
939 extraction rules, it's not always clear what that means. */
940 else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
941 && GET_MODE_INNER (imode) == omode)
943 /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
944 i.e. (subreg:V4SF (reg:SF) 0) or (subreg:V4SF (reg:V2SF) 0). This
945 surely isn't the cleanest way to represent this. It's questionable
946 if this ought to be represented at all -- why can't this all be hidden
947 in post-reload splitters that make arbitrarily mode changes to the
948 registers themselves. */
949 else if (VECTOR_MODE_P (omode)
950 && GET_MODE_INNER (omode) == GET_MODE_INNER (imode))
952 /* Subregs involving floating point modes are not allowed to
953 change size unless it's an insert into a complex mode.
954 Therefore (subreg:DI (reg:DF) 0) and (subreg:CS (reg:SF) 0) are fine, but
955 (subreg:SI (reg:DF) 0) isn't. */
956 else if ((FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
957 && !COMPLEX_MODE_P (omode))
959 if (! (known_eq (isize, osize)
960 /* LRA can use subreg to store a floating point value in
961 an integer mode. Although the floating point and the
962 integer modes need the same number of hard registers,
963 the size of floating point mode can be less than the
964 integer mode. LRA also uses subregs for a register that
965 should be used in a different mode in one insn. */
966 || lra_in_progress))
967 return false;
970 /* Paradoxical subregs must have offset zero. */
971 if (maybe_gt (osize, isize))
972 return known_eq (offset, 0U);
974 /* This is a normal subreg. Verify that the offset is representable. */
976 /* For hard registers, we already have most of these rules collected in
977 subreg_offset_representable_p. */
978 if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
980 unsigned int regno = REGNO (reg);
982 if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
983 && GET_MODE_INNER (imode) == omode)
985 else if (!REG_CAN_CHANGE_MODE_P (regno, imode, omode))
986 return false;
988 return subreg_offset_representable_p (regno, imode, offset, omode);
991 /* The outer size must be ordered wrt the register size, otherwise
992 we wouldn't know at compile time how many registers the outer
993 mode occupies. */
994 if (!ordered_p (osize, regsize))
995 return false;
997 /* For pseudo registers, we want most of the same checks. Namely:
999 Assume that the pseudo register will be allocated to hard registers
1000 that can hold REGSIZE bytes each. If OSIZE is not a multiple of REGSIZE,
1001 the remainder must correspond to the lowpart of the containing hard
1002 register. If BYTES_BIG_ENDIAN, the lowpart is at the highest offset,
1003 otherwise it is at the lowest offset.
1005 Given that we've already checked the mode and offset alignment,
1006 we only have to check subblock subregs here. */
1007 if (maybe_lt (osize, regsize)
1008 && ! (lra_in_progress && (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))))
1010 /* It is invalid for the target to pick a register size for a mode
1011 that isn't ordered wrt the size of that mode. */
1012 poly_uint64 block_size = ordered_min (isize, regsize);
1013 unsigned int start_reg;
1014 poly_uint64 offset_within_reg;
1015 if (!can_div_trunc_p (offset, block_size, &start_reg, &offset_within_reg)
1016 || (BYTES_BIG_ENDIAN
1017 ? maybe_ne (offset_within_reg, block_size - osize)
1018 : maybe_ne (offset_within_reg, 0U)))
1019 return false;
1021 return true;
1025 gen_rtx_SUBREG (machine_mode mode, rtx reg, poly_uint64 offset)
1027 gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
1028 return gen_rtx_raw_SUBREG (mode, reg, offset);
1031 /* Generate a SUBREG representing the least-significant part of REG if MODE
1032 is smaller than mode of REG, otherwise paradoxical SUBREG. */
1035 gen_lowpart_SUBREG (machine_mode mode, rtx reg)
1037 machine_mode inmode;
1039 inmode = GET_MODE (reg);
1040 if (inmode == VOIDmode)
1041 inmode = mode;
1042 return gen_rtx_SUBREG (mode, reg,
1043 subreg_lowpart_offset (mode, inmode));
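/* Illustrative example: on a little-endian target, gen_lowpart_SUBREG
   (SImode, x) for a DImode REG x yields (subreg:SI (reg:DI x) 0), while on a
   fully big-endian target the byte offset is 4.  If MODE is wider than the
   mode of REG, the result is a paradoxical SUBREG with offset 0. */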
1047 gen_rtx_VAR_LOCATION (machine_mode mode, tree decl, rtx loc,
1048 enum var_init_status status)
1050 rtx x = gen_rtx_fmt_te (VAR_LOCATION, mode, decl, loc);
1051 PAT_VAR_LOCATION_STATUS (x) = status;
1052 return x;
1056 /* Create an rtvec and stores within it the RTXen passed in the arguments. */
1058 rtvec
1059 gen_rtvec (int n, ...)
1061 int i;
1062 rtvec rt_val;
1063 va_list p;
1065 va_start (p, n);
1067 /* Don't allocate an empty rtvec... */
1068 if (n == 0)
1070 va_end (p);
1071 return NULL_RTVEC;
1074 rt_val = rtvec_alloc (n);
1076 for (i = 0; i < n; i++)
1077 rt_val->elem[i] = va_arg (p, rtx);
1079 va_end (p);
1080 return rt_val;
1083 rtvec
1084 gen_rtvec_v (int n, rtx *argp)
1086 int i;
1087 rtvec rt_val;
1089 /* Don't allocate an empty rtvec... */
1090 if (n == 0)
1091 return NULL_RTVEC;
1093 rt_val = rtvec_alloc (n);
1095 for (i = 0; i < n; i++)
1096 rt_val->elem[i] = *argp++;
1098 return rt_val;
1101 rtvec
1102 gen_rtvec_v (int n, rtx_insn **argp)
1104 int i;
1105 rtvec rt_val;
1107 /* Don't allocate an empty rtvec... */
1108 if (n == 0)
1109 return NULL_RTVEC;
1111 rt_val = rtvec_alloc (n);
1113 for (i = 0; i < n; i++)
1114 rt_val->elem[i] = *argp++;
1116 return rt_val;
1120 /* Return the number of bytes between the start of an OUTER_MODE
1121 in-memory value and the start of an INNER_MODE in-memory value,
1122 given that the former is a lowpart of the latter. It may be a
1123 paradoxical lowpart, in which case the offset will be negative
1124 on big-endian targets. */
1126 poly_int64
1127 byte_lowpart_offset (machine_mode outer_mode,
1128 machine_mode inner_mode)
1130 if (paradoxical_subreg_p (outer_mode, inner_mode))
1131 return -subreg_lowpart_offset (inner_mode, outer_mode);
1132 else
1133 return subreg_lowpart_offset (outer_mode, inner_mode);
1136 /* Return the offset of (subreg:OUTER_MODE (mem:INNER_MODE X) OFFSET)
1137 from address X. For paradoxical big-endian subregs this is a
1138 negative value, otherwise it's the same as OFFSET. */
1140 poly_int64
1141 subreg_memory_offset (machine_mode outer_mode, machine_mode inner_mode,
1142 poly_uint64 offset)
1144 if (paradoxical_subreg_p (outer_mode, inner_mode))
1146 gcc_assert (known_eq (offset, 0U));
1147 return -subreg_lowpart_offset (inner_mode, outer_mode);
1149 return offset;
1152 /* As above, but return the offset that existing subreg X would have
1153 if SUBREG_REG (X) were stored in memory. The only significant thing
1154 about the current SUBREG_REG is its mode. */
1156 poly_int64
1157 subreg_memory_offset (const_rtx x)
1159 return subreg_memory_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)),
1160 SUBREG_BYTE (x));
1163 /* Generate a REG rtx for a new pseudo register of mode MODE.
1164 This pseudo is assigned the next sequential register number. */
1167 gen_reg_rtx (machine_mode mode)
1169 rtx val;
1170 unsigned int align = GET_MODE_ALIGNMENT (mode);
1172 gcc_assert (can_create_pseudo_p ());
1174 /* If a virtual register with bigger mode alignment is generated,
1175 increase stack alignment estimation because it might be spilled
1176 to stack later. */
1177 if (SUPPORTS_STACK_ALIGNMENT
1178 && crtl->stack_alignment_estimated < align
1179 && !crtl->stack_realign_processed)
1181 unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
1182 if (crtl->stack_alignment_estimated < min_align)
1183 crtl->stack_alignment_estimated = min_align;
1186 if (generating_concat_p
1187 && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
1188 || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
1190 /* For complex modes, don't make a single pseudo.
1191 Instead, make a CONCAT of two pseudos.
1192 This allows noncontiguous allocation of the real and imaginary parts,
1193 which makes much better code. Besides, allocating DCmode
1194 pseudos overstrains reload on some machines like the 386. */
1195 rtx realpart, imagpart;
1196 machine_mode partmode = GET_MODE_INNER (mode);
1198 realpart = gen_reg_rtx (partmode);
1199 imagpart = gen_reg_rtx (partmode);
1200 return gen_rtx_CONCAT (mode, realpart, imagpart);
1203 /* Do not call gen_reg_rtx with uninitialized crtl. */
1204 gcc_assert (crtl->emit.regno_pointer_align_length);
1206 crtl->emit.ensure_regno_capacity ();
1207 gcc_assert (reg_rtx_no < crtl->emit.regno_pointer_align_length);
1209 val = gen_raw_REG (mode, reg_rtx_no);
1210 regno_reg_rtx[reg_rtx_no++] = val;
1211 return val;
1214 /* Make sure m_regno_pointer_align and regno_reg_rtx are large
1215 enough to have elements in the range 0 <= idx <= reg_rtx_no. */
1217 void
1218 emit_status::ensure_regno_capacity ()
1220 int old_size = regno_pointer_align_length;
1222 if (reg_rtx_no < old_size)
1223 return;
1225 int new_size = old_size * 2;
1226 while (reg_rtx_no >= new_size)
1227 new_size *= 2;
1229 char *tmp = XRESIZEVEC (char, regno_pointer_align, new_size);
1230 memset (tmp + old_size, 0, new_size - old_size);
1231 regno_pointer_align = (unsigned char *) tmp;
1233 rtx *new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, new_size);
1234 memset (new1 + old_size, 0, (new_size - old_size) * sizeof (rtx));
1235 regno_reg_rtx = new1;
1237 crtl->emit.regno_pointer_align_length = new_size;
1240 /* Return TRUE if REG is a PARM_DECL, FALSE otherwise. */
1242 bool
1243 reg_is_parm_p (rtx reg)
1245 tree decl;
1247 gcc_assert (REG_P (reg));
1248 decl = REG_EXPR (reg);
1249 return (decl && TREE_CODE (decl) == PARM_DECL);
1252 /* Update NEW with the same attributes as REG, but with OFFSET added
1253 to the REG_OFFSET. */
1255 static void
1256 update_reg_offset (rtx new_rtx, rtx reg, poly_int64 offset)
1258 REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
1259 REG_OFFSET (reg) + offset);
1262 /* Generate a register with same attributes as REG, but with OFFSET
1263 added to the REG_OFFSET. */
1266 gen_rtx_REG_offset (rtx reg, machine_mode mode, unsigned int regno,
1267 poly_int64 offset)
1269 /* Use gen_raw_REG rather than gen_rtx_REG, because otherwise we'd
1270 overwrite REG_ATTRS (and in the callers often ORIGINAL_REGNO too)
1271 of the shared REG rtxes like stack_pointer_rtx etc. This should
1272 happen only for SUBREGs from DEBUG_INSNs, RA should ensure
1273 multi-word registers don't overlap the special registers like
1274 stack pointer. */
1275 rtx new_rtx = gen_raw_REG (mode, regno);
1277 update_reg_offset (new_rtx, reg, offset);
1278 return new_rtx;
1281 /* Generate a new pseudo-register with the same attributes as REG, but
1282 with OFFSET added to the REG_OFFSET. */
1285 gen_reg_rtx_offset (rtx reg, machine_mode mode, int offset)
1287 rtx new_rtx = gen_reg_rtx (mode);
1289 update_reg_offset (new_rtx, reg, offset);
1290 return new_rtx;
1293 /* Adjust REG in-place so that it has mode MODE. It is assumed that the
1294 new register is a (possibly paradoxical) lowpart of the old one. */
1296 void
1297 adjust_reg_mode (rtx reg, machine_mode mode)
1299 update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
1300 PUT_MODE (reg, mode);
1303 /* Copy REG's attributes from X, if X has any attributes. If REG and X
1304 have different modes, REG is a (possibly paradoxical) lowpart of X. */
1306 void
1307 set_reg_attrs_from_value (rtx reg, rtx x)
1309 poly_int64 offset;
1310 bool can_be_reg_pointer = true;
1312 /* Don't call mark_reg_pointer for incompatible pointer sign
1313 extension. */
1314 while (GET_CODE (x) == SIGN_EXTEND
1315 || GET_CODE (x) == ZERO_EXTEND
1316 || GET_CODE (x) == TRUNCATE
1317 || (GET_CODE (x) == SUBREG && subreg_lowpart_p (x)))
1319 #if defined(POINTERS_EXTEND_UNSIGNED)
1320 if (((GET_CODE (x) == SIGN_EXTEND && POINTERS_EXTEND_UNSIGNED)
1321 || (GET_CODE (x) == ZERO_EXTEND && ! POINTERS_EXTEND_UNSIGNED)
1322 || (paradoxical_subreg_p (x)
1323 && ! (SUBREG_PROMOTED_VAR_P (x)
1324 && SUBREG_CHECK_PROMOTED_SIGN (x,
1325 POINTERS_EXTEND_UNSIGNED))))
1326 && !targetm.have_ptr_extend ())
1327 can_be_reg_pointer = false;
1328 #endif
1329 x = XEXP (x, 0);
1332 /* Hard registers can be reused for multiple purposes within the same
1333 function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
1334 on them is wrong. */
1335 if (HARD_REGISTER_P (reg))
1336 return;
1338 offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
1339 if (MEM_P (x))
1341 if (MEM_OFFSET_KNOWN_P (x))
1342 REG_ATTRS (reg) = get_reg_attrs (MEM_EXPR (x),
1343 MEM_OFFSET (x) + offset);
1344 if (can_be_reg_pointer && MEM_POINTER (x))
1345 mark_reg_pointer (reg, 0);
1347 else if (REG_P (x))
1349 if (REG_ATTRS (x))
1350 update_reg_offset (reg, x, offset);
1351 if (can_be_reg_pointer && REG_POINTER (x))
1352 mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
1356 /* Generate a REG rtx for a new pseudo register, copying the mode
1357 and attributes from X. */
1360 gen_reg_rtx_and_attrs (rtx x)
1362 rtx reg = gen_reg_rtx (GET_MODE (x));
1363 set_reg_attrs_from_value (reg, x);
1364 return reg;
1367 /* Set the register attributes for registers contained in PARM_RTX.
1368 Use needed values from memory attributes of MEM. */
1370 void
1371 set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
1373 if (REG_P (parm_rtx))
1374 set_reg_attrs_from_value (parm_rtx, mem);
1375 else if (GET_CODE (parm_rtx) == PARALLEL)
1377 /* Check for a NULL entry in the first slot, used to indicate that the
1378 parameter goes both on the stack and in registers. */
1379 int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
1380 for (; i < XVECLEN (parm_rtx, 0); i++)
1382 rtx x = XVECEXP (parm_rtx, 0, i);
1383 if (REG_P (XEXP (x, 0)))
1384 REG_ATTRS (XEXP (x, 0))
1385 = get_reg_attrs (MEM_EXPR (mem),
1386 INTVAL (XEXP (x, 1)));
1391 /* Set the REG_ATTRS for registers in value X, given that X represents
1392 decl T. */
1394 void
1395 set_reg_attrs_for_decl_rtl (tree t, rtx x)
1397 if (!t)
1398 return;
1399 tree tdecl = t;
1400 if (GET_CODE (x) == SUBREG)
1402 gcc_assert (subreg_lowpart_p (x));
1403 x = SUBREG_REG (x);
1405 if (REG_P (x))
1406 REG_ATTRS (x)
1407 = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
1408 DECL_P (tdecl)
1409 ? DECL_MODE (tdecl)
1410 : TYPE_MODE (TREE_TYPE (tdecl))));
1411 if (GET_CODE (x) == CONCAT)
1413 if (REG_P (XEXP (x, 0)))
1414 REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
1415 if (REG_P (XEXP (x, 1)))
1416 REG_ATTRS (XEXP (x, 1))
1417 = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
1419 if (GET_CODE (x) == PARALLEL)
1421 int i, start;
1423 /* Check for a NULL entry, used to indicate that the parameter goes
1424 both on the stack and in registers. */
1425 if (XEXP (XVECEXP (x, 0, 0), 0))
1426 start = 0;
1427 else
1428 start = 1;
1430 for (i = start; i < XVECLEN (x, 0); i++)
1432 rtx y = XVECEXP (x, 0, i);
1433 if (REG_P (XEXP (y, 0)))
1434 REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
1439 /* Assign the RTX X to declaration T. */
1441 void
1442 set_decl_rtl (tree t, rtx x)
1444 DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
1445 if (x)
1446 set_reg_attrs_for_decl_rtl (t, x);
1449 /* Assign the RTX X to parameter declaration T. BY_REFERENCE_P is true
1450 if the ABI requires the parameter to be passed by reference. */
1452 void
1453 set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
1455 DECL_INCOMING_RTL (t) = x;
1456 if (x && !by_reference_p)
1457 set_reg_attrs_for_decl_rtl (t, x);
1460 /* Identify REG (which may be a CONCAT) as a user register. */
1462 void
1463 mark_user_reg (rtx reg)
1465 if (GET_CODE (reg) == CONCAT)
1467 REG_USERVAR_P (XEXP (reg, 0)) = 1;
1468 REG_USERVAR_P (XEXP (reg, 1)) = 1;
1470 else
1472 gcc_assert (REG_P (reg));
1473 REG_USERVAR_P (reg) = 1;
1477 /* Identify REG as a probable pointer register and show its alignment
1478 as ALIGN, if nonzero. */
1480 void
1481 mark_reg_pointer (rtx reg, int align)
1483 if (! REG_POINTER (reg))
1485 REG_POINTER (reg) = 1;
1487 if (align)
1488 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1490 else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
1491 /* We can no longer be sure just how aligned this pointer is. */
1492 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1495 /* Return 1 plus largest pseudo reg number used in the current function. */
1498 max_reg_num (void)
1500 return reg_rtx_no;
1503 /* Return 1 + the largest label number used so far in the current function. */
1506 max_label_num (void)
1508 return label_num;
1511 /* Return first label number used in this function (if any were used). */
1514 get_first_label_num (void)
1516 return first_label_num;
1519 /* If the rtx for label was created during the expansion of a nested
1520 function, then first_label_num won't include this label number.
1521 Fix this now so that array indices work later. */
1523 void
1524 maybe_set_first_label_num (rtx_code_label *x)
1526 if (CODE_LABEL_NUMBER (x) < first_label_num)
1527 first_label_num = CODE_LABEL_NUMBER (x);
1530 /* For use by the RTL function loader, when mingling with normal
1531 functions.
1532 Ensure that label_num is greater than the label num of X, to avoid
1533 duplicate labels in the generated assembler. */
1535 void
1536 maybe_set_max_label_num (rtx_code_label *x)
1538 if (CODE_LABEL_NUMBER (x) >= label_num)
1539 label_num = CODE_LABEL_NUMBER (x) + 1;
1543 /* Return a value representing some low-order bits of X, where the number
1544 of low-order bits is given by MODE. Note that no conversion is done
1545 between floating-point and fixed-point values, rather, the bit
1546 representation is returned.
1548 This function handles the cases in common between gen_lowpart, below,
1549 and two variants in cse.cc and combine.cc. These are the cases that can
1550 be safely handled at all points in the compilation.
1552 If this is not a case we can handle, return 0. */
1555 gen_lowpart_common (machine_mode mode, rtx x)
1557 poly_uint64 msize = GET_MODE_SIZE (mode);
1558 machine_mode innermode;
1560 /* Unfortunately, this routine doesn't take a parameter for the mode of X,
1561 so we have to make one up. Yuk. */
1562 innermode = GET_MODE (x);
1563 if (CONST_INT_P (x)
1564 && known_le (msize * BITS_PER_UNIT,
1565 (unsigned HOST_WIDE_INT) HOST_BITS_PER_WIDE_INT))
1566 innermode = int_mode_for_size (HOST_BITS_PER_WIDE_INT, 0).require ();
1567 else if (innermode == VOIDmode)
1568 innermode = int_mode_for_size (HOST_BITS_PER_DOUBLE_INT, 0).require ();
1570 gcc_assert (innermode != VOIDmode && innermode != BLKmode);
1572 if (innermode == mode)
1573 return x;
1575 /* The size of the outer and inner modes must be ordered. */
1576 poly_uint64 xsize = GET_MODE_SIZE (innermode);
1577 if (!ordered_p (msize, xsize))
1578 return 0;
1580 if (SCALAR_FLOAT_MODE_P (mode))
1582 /* Don't allow paradoxical FLOAT_MODE subregs. */
1583 if (maybe_gt (msize, xsize))
1584 return 0;
1586 else
1588 /* MODE must occupy no more of the underlying registers than X. */
1589 poly_uint64 regsize = REGMODE_NATURAL_SIZE (innermode);
1590 unsigned int mregs, xregs;
1591 if (!can_div_away_from_zero_p (msize, regsize, &mregs)
1592 || !can_div_away_from_zero_p (xsize, regsize, &xregs)
1593 || mregs > xregs)
1594 return 0;
1597 scalar_int_mode int_mode, int_innermode, from_mode;
1598 if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
1599 && is_a <scalar_int_mode> (mode, &int_mode)
1600 && is_a <scalar_int_mode> (innermode, &int_innermode)
1601 && is_a <scalar_int_mode> (GET_MODE (XEXP (x, 0)), &from_mode))
1603 /* If we are getting the low-order part of something that has been
1604 sign- or zero-extended, we can either just use the object being
1605 extended or make a narrower extension. If we want an even smaller
1606 piece than the size of the object being extended, call ourselves
1607 recursively.
1609 This case is used mostly by combine and cse. */
1611 if (from_mode == int_mode)
1612 return XEXP (x, 0);
1613 else if (GET_MODE_SIZE (int_mode) < GET_MODE_SIZE (from_mode))
1614 return gen_lowpart_common (int_mode, XEXP (x, 0));
1615 else if (GET_MODE_SIZE (int_mode) < GET_MODE_SIZE (int_innermode))
1616 return gen_rtx_fmt_e (GET_CODE (x), int_mode, XEXP (x, 0));
1618 else if (GET_CODE (x) == SUBREG || REG_P (x)
1619 || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
1620 || CONST_DOUBLE_AS_FLOAT_P (x) || CONST_SCALAR_INT_P (x)
1621 || CONST_POLY_INT_P (x))
1622 return lowpart_subreg (mode, x, innermode);
1624 /* Otherwise, we can't do this. */
1625 return 0;
1629 gen_highpart (machine_mode mode, rtx x)
1631 poly_uint64 msize = GET_MODE_SIZE (mode);
1632 rtx result;
1634 /* This case loses if X is a subreg. To catch bugs early,
1635 complain if an invalid MODE is used even in other cases. */
1636 gcc_assert (known_le (msize, (unsigned int) UNITS_PER_WORD)
1637 || known_eq (msize, GET_MODE_UNIT_SIZE (GET_MODE (x))));
1639 /* gen_lowpart_common handles a lot of special cases due to needing to handle
1640 paradoxical subregs; it only calls simplify_gen_subreg when certain that
1641 it will produce something meaningful. The only case we need to handle
1642 specially here is MEM. */
1643 if (MEM_P (x))
1645 poly_int64 offset = subreg_highpart_offset (mode, GET_MODE (x));
1646 return adjust_address (x, mode, offset);
1649 result = simplify_gen_subreg (mode, x, GET_MODE (x),
1650 subreg_highpart_offset (mode, GET_MODE (x)));
1651 /* Since we handle MEM directly above, we should never get a MEM back
1652 from simplify_gen_subreg. */
1653 gcc_assert (result && !MEM_P (result));
1655 return result;
1658 /* Like gen_highpart, but accept mode of EXP operand in case EXP can
1659 be VOIDmode constant. */
1661 gen_highpart_mode (machine_mode outermode, machine_mode innermode, rtx exp)
1663 if (GET_MODE (exp) != VOIDmode)
1665 gcc_assert (GET_MODE (exp) == innermode);
1666 return gen_highpart (outermode, exp);
1668 return simplify_gen_subreg (outermode, exp, innermode,
1669 subreg_highpart_offset (outermode, innermode));
1672 /* Return the SUBREG_BYTE for a lowpart subreg whose outer mode has
1673 OUTER_BYTES bytes and whose inner mode has INNER_BYTES bytes. */
1675 poly_uint64
1676 subreg_size_lowpart_offset (poly_uint64 outer_bytes, poly_uint64 inner_bytes)
1678 gcc_checking_assert (ordered_p (outer_bytes, inner_bytes));
1679 if (maybe_gt (outer_bytes, inner_bytes))
1680 /* Paradoxical subregs always have a SUBREG_BYTE of 0. */
1681 return 0;
1683 if (BYTES_BIG_ENDIAN && WORDS_BIG_ENDIAN)
1684 return inner_bytes - outer_bytes;
1685 else if (!BYTES_BIG_ENDIAN && !WORDS_BIG_ENDIAN)
1686 return 0;
1687 else
1688 return subreg_size_offset_from_lsb (outer_bytes, inner_bytes, 0);
1691 /* Return the SUBREG_BYTE for a highpart subreg whose outer mode has
1692 OUTER_BYTES bytes and whose inner mode has INNER_BYTES bytes. */
1694 poly_uint64
1695 subreg_size_highpart_offset (poly_uint64 outer_bytes, poly_uint64 inner_bytes)
1697 gcc_assert (known_ge (inner_bytes, outer_bytes));
1699 if (BYTES_BIG_ENDIAN && WORDS_BIG_ENDIAN)
1700 return 0;
1701 else if (!BYTES_BIG_ENDIAN && !WORDS_BIG_ENDIAN)
1702 return inner_bytes - outer_bytes;
1703 else
1704 return subreg_size_offset_from_lsb (outer_bytes, inner_bytes,
1705 (inner_bytes - outer_bytes)
1706 * BITS_PER_UNIT);
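/* Illustrative example for the two helpers above: with a 4-byte outer mode
   inside an 8-byte inner mode (say SImode within DImode), the lowpart
   SUBREG_BYTE is 0 and the highpart SUBREG_BYTE is 4 on a little-endian
   target; on a fully big-endian target the two offsets are swapped. */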
1709 /* Return 1 iff X, assumed to be a SUBREG,
1710 refers to the least significant part of its containing reg.
1711 If X is not a SUBREG, always return 1 (it is its own low part!). */
1714 subreg_lowpart_p (const_rtx x)
1716 if (GET_CODE (x) != SUBREG)
1717 return 1;
1718 else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
1719 return 0;
1721 return known_eq (subreg_lowpart_offset (GET_MODE (x),
1722 GET_MODE (SUBREG_REG (x))),
1723 SUBREG_BYTE (x));
1726 /* Return subword OFFSET of operand OP.
1727 The word number, OFFSET, is interpreted as the word number starting
1728 at the low-order address. OFFSET 0 is the low-order word if not
1729 WORDS_BIG_ENDIAN, otherwise it is the high-order word.
1731 If we cannot extract the required word, we return zero. Otherwise,
1732 an rtx corresponding to the requested word will be returned.
1734 VALIDATE_ADDRESS is nonzero if the address should be validated. Before
1735 reload has completed, a valid address will always be returned. After
1736 reload, if a valid address cannot be returned, we return zero.
1738 If VALIDATE_ADDRESS is zero, we simply form the required address; validating
1739 it is the responsibility of the caller.
1741 MODE is the mode of OP in case it is a CONST_INT.
1743 ??? This is still rather broken for some cases. The problem for the
1744 moment is that all callers of this thing provide no 'goal mode' to
1745 tell us to work with. This exists because all callers were written
1746 in a word based SUBREG world.
1747 Now use of this function can be deprecated by simplify_subreg in most
1748 cases.
1752 operand_subword (rtx op, poly_uint64 offset, int validate_address,
1753 machine_mode mode)
1755 if (mode == VOIDmode)
1756 mode = GET_MODE (op);
1758 gcc_assert (mode != VOIDmode);
1760 /* If OP is narrower than a word, fail. */
1761 if (mode != BLKmode
1762 && maybe_lt (GET_MODE_SIZE (mode), UNITS_PER_WORD))
1763 return 0;
1765 /* If we want a word outside OP, return zero. */
1766 if (mode != BLKmode
1767 && maybe_gt ((offset + 1) * UNITS_PER_WORD, GET_MODE_SIZE (mode)))
1768 return const0_rtx;
1770 /* Form a new MEM at the requested address. */
1771 if (MEM_P (op))
1773 rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);
1775 if (! validate_address)
1776 return new_rtx;
1778 else if (reload_completed)
1780 if (! strict_memory_address_addr_space_p (word_mode,
1781 XEXP (new_rtx, 0),
1782 MEM_ADDR_SPACE (op)))
1783 return 0;
1785 else
1786 return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
1789 /* Rest can be handled by simplify_subreg. */
1790 return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
1793 /* Similar to `operand_subword', but never return 0. If we can't
1794 extract the required subword, put OP into a register and try again.
1795 The second attempt must succeed. We always validate the address in
1796 this case.
1798 MODE is the mode of OP, in case it is CONST_INT. */
1801 operand_subword_force (rtx op, poly_uint64 offset, machine_mode mode)
1803 rtx result = operand_subword (op, offset, 1, mode);
1805 if (result)
1806 return result;
1808 if (mode != BLKmode && mode != VOIDmode)
1810 /* If this is a register which cannot be accessed by words, copy it
1811 to a pseudo register. */
1812 if (REG_P (op))
1813 op = copy_to_reg (op);
1814 else
1815 op = force_reg (mode, op);
1818 result = operand_subword (op, offset, 1, mode);
1819 gcc_assert (result);
1821 return result;
1824 mem_attrs::mem_attrs ()
1825 : expr (NULL_TREE),
1826 offset (0),
1827 size (0),
1828 alias (0),
1829 align (0),
1830 addrspace (ADDR_SPACE_GENERIC),
1831 offset_known_p (false),
1832 size_known_p (false)
1835 /* Returns 1 if both MEM_EXPRs can be considered equal,
1836 and 0 otherwise. */
1839 mem_expr_equal_p (const_tree expr1, const_tree expr2)
1841 if (expr1 == expr2)
1842 return 1;
1844 if (! expr1 || ! expr2)
1845 return 0;
1847 if (TREE_CODE (expr1) != TREE_CODE (expr2))
1848 return 0;
1850 return operand_equal_p (expr1, expr2, 0);
1853 /* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
1854 bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
1855 -1 if not known. */
1858 get_mem_align_offset (rtx mem, unsigned int align)
1860 tree expr;
1861 poly_uint64 offset;
1863 /* This function can't use
1864 if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem)
1865 || (MAX (MEM_ALIGN (mem),
1866 MAX (align, get_object_alignment (MEM_EXPR (mem))))
1867 < align))
1868 return -1;
1869 else
1870 return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1);
1871 for two reasons:
1872 - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
1873 for <variable>. get_inner_reference doesn't handle it and
1874 even if it did, the alignment in that case needs to be determined
1875 from DECL_FIELD_CONTEXT's TYPE_ALIGN.
1876 - it would do suboptimal job for COMPONENT_REFs, even if MEM_EXPR
1877 isn't sufficiently aligned, the object it is in might be. */
1878 gcc_assert (MEM_P (mem));
1879 expr = MEM_EXPR (mem);
1880 if (expr == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
1881 return -1;
1883 offset = MEM_OFFSET (mem);
1884 if (DECL_P (expr))
1886 if (DECL_ALIGN (expr) < align)
1887 return -1;
1889 else if (INDIRECT_REF_P (expr))
1891 if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
1892 return -1;
1894 else if (TREE_CODE (expr) == COMPONENT_REF)
1896 while (1)
1898 tree inner = TREE_OPERAND (expr, 0);
1899 tree field = TREE_OPERAND (expr, 1);
1900 tree byte_offset = component_ref_field_offset (expr);
1901 tree bit_offset = DECL_FIELD_BIT_OFFSET (field);
1903 poly_uint64 suboffset;
1904 if (!byte_offset
1905 || !poly_int_tree_p (byte_offset, &suboffset)
1906 || !tree_fits_uhwi_p (bit_offset))
1907 return -1;
1909 offset += suboffset;
1910 offset += tree_to_uhwi (bit_offset) / BITS_PER_UNIT;
1912 if (inner == NULL_TREE)
1914 if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
1915 < (unsigned int) align)
1916 return -1;
1917 break;
1919 else if (DECL_P (inner))
1921 if (DECL_ALIGN (inner) < align)
1922 return -1;
1923 break;
1925 else if (TREE_CODE (inner) != COMPONENT_REF)
1926 return -1;
1927 expr = inner;
1930 else
1931 return -1;
1933 HOST_WIDE_INT misalign;
1934 if (!known_misalignment (offset, align / BITS_PER_UNIT, &misalign))
1935 return -1;
1936 return misalign;
1939 /* Given REF (a MEM) and T, either the type of X or the expression
1940 corresponding to REF, set the memory attributes. OBJECTP is nonzero
1941 if we are making a new object of this type. BITPOS is nonzero if
1942 there is an offset outstanding on T that will be applied later. */
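/* Illustrative sketch: most callers go through the BITPOS == 0 wrapper
   set_mem_attributes defined further below; e.g. after building a MEM for
   an expression EXP at address ADDR (both placeholders here):

     rtx mem = gen_rtx_MEM (mode, addr);
     set_mem_attributes (mem, exp, 0);

   with OBJECTP nonzero instead when a new object of EXP's type is being
   created.  */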
1944 void
1945 set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
1946 poly_int64 bitpos)
1948 poly_int64 apply_bitpos = 0;
1949 tree type;
1950 class mem_attrs attrs, *defattrs, *refattrs;
1951 addr_space_t as;
1953 /* It can happen that type_for_mode was given a mode for which there
1954 is no language-level type. In which case it returns NULL, which
1955 we can see here. */
1956 if (t == NULL_TREE)
1957 return;
1959 type = TYPE_P (t) ? t : TREE_TYPE (t);
1960 if (type == error_mark_node)
1961 return;
1963 /* If we have already set DECL_RTL = ref, get_alias_set will get the
1964 wrong answer, as it assumes that DECL_RTL already has the right alias
1965 info. Callers should not set DECL_RTL until after the call to
1966 set_mem_attributes. */
1967 gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));
1969 /* Get the alias set from the expression or type (perhaps using a
1970 front-end routine) and use it. */
1971 attrs.alias = get_alias_set (t);
1973 MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
1974 MEM_POINTER (ref) = POINTER_TYPE_P (type);
1976 /* Default values from pre-existing memory attributes if present. */
1977 refattrs = MEM_ATTRS (ref);
1978 if (refattrs)
1980 /* ??? Can this ever happen? Calling this routine on a MEM that
1981 already carries memory attributes should probably be invalid. */
1982 attrs.expr = refattrs->expr;
1983 attrs.offset_known_p = refattrs->offset_known_p;
1984 attrs.offset = refattrs->offset;
1985 attrs.size_known_p = refattrs->size_known_p;
1986 attrs.size = refattrs->size;
1987 attrs.align = refattrs->align;
1990 /* Otherwise, default values from the mode of the MEM reference. */
1991 else
1993 defattrs = mode_mem_attrs[(int) GET_MODE (ref)];
1994 gcc_assert (!defattrs->expr);
1995 gcc_assert (!defattrs->offset_known_p);
1997 /* Respect mode size. */
1998 attrs.size_known_p = defattrs->size_known_p;
1999 attrs.size = defattrs->size;
2000 /* ??? Is this really necessary? We probably should always get
2001 the size from the type below. */
2003 /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
2004 if T is an object, always compute the object alignment below. */
2005 if (TYPE_P (t))
2006 attrs.align = defattrs->align;
2007 else
2008 attrs.align = BITS_PER_UNIT;
2009 /* ??? If T is a type, respecting mode alignment may *also* be wrong
2010 e.g. if the type carries an alignment attribute. Should we be
2011 able to simply always use TYPE_ALIGN? */
2014 /* We can set the alignment from the type if we are making an object or if
2015 this is an INDIRECT_REF. */
2016 if (objectp || TREE_CODE (t) == INDIRECT_REF)
2017 attrs.align = MAX (attrs.align, TYPE_ALIGN (type));
2019 /* If the size is known, we can set that. */
2020 tree new_size = TYPE_SIZE_UNIT (type);
2022 /* The address-space is that of the type. */
2023 as = TYPE_ADDR_SPACE (type);
2025 /* If T is not a type, we may be able to deduce some more information about
2026 the expression. */
2027 if (! TYPE_P (t))
2029 tree base;
2031 if (TREE_THIS_VOLATILE (t))
2032 MEM_VOLATILE_P (ref) = 1;
2034 /* Now remove any conversions: they don't change what the underlying
2035 object is. Likewise for SAVE_EXPR. */
2036 while (CONVERT_EXPR_P (t)
2037 || TREE_CODE (t) == VIEW_CONVERT_EXPR
2038 || TREE_CODE (t) == SAVE_EXPR)
2039 t = TREE_OPERAND (t, 0);
2041 /* Note whether this expression can trap. */
2042 MEM_NOTRAP_P (ref) = !tree_could_trap_p (t);
2044 base = get_base_address (t);
2045 if (base)
2047 if (DECL_P (base)
2048 && TREE_READONLY (base)
2049 && (TREE_STATIC (base) || DECL_EXTERNAL (base))
2050 && !TREE_THIS_VOLATILE (base))
2051 MEM_READONLY_P (ref) = 1;
2053 /* Mark static const strings readonly as well. */
2054 if (TREE_CODE (base) == STRING_CST
2055 && TREE_READONLY (base)
2056 && TREE_STATIC (base))
2057 MEM_READONLY_P (ref) = 1;
2059 /* Address-space information is on the base object. */
2060 if (TREE_CODE (base) == MEM_REF
2061 || TREE_CODE (base) == TARGET_MEM_REF)
2062 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (base,
2063 0))));
2064 else
2065 as = TYPE_ADDR_SPACE (TREE_TYPE (base));
2068 /* If this expression uses its parent's alias set, mark it such
2069 that we won't change it. */
2070 if (component_uses_parent_alias_set_from (t) != NULL_TREE)
2071 MEM_KEEP_ALIAS_SET_P (ref) = 1;
2073 /* If this is a decl, set the attributes of the MEM from it. */
2074 if (DECL_P (t))
2076 attrs.expr = t;
2077 attrs.offset_known_p = true;
2078 attrs.offset = 0;
2079 apply_bitpos = bitpos;
2080 new_size = DECL_SIZE_UNIT (t);
2083 /* ??? If we end up with a constant or a descriptor do not
2084 record a MEM_EXPR. */
2085 else if (CONSTANT_CLASS_P (t)
2086 || TREE_CODE (t) == CONSTRUCTOR)
2089 /* If this is a field reference, record it. */
2090 else if (TREE_CODE (t) == COMPONENT_REF)
2092 attrs.expr = t;
2093 attrs.offset_known_p = true;
2094 attrs.offset = 0;
2095 apply_bitpos = bitpos;
2096 if (DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
2097 new_size = DECL_SIZE_UNIT (TREE_OPERAND (t, 1));
2100 /* Else record it. */
2101 else
2103 gcc_assert (handled_component_p (t)
2104 || TREE_CODE (t) == MEM_REF
2105 || TREE_CODE (t) == TARGET_MEM_REF);
2106 attrs.expr = t;
2107 attrs.offset_known_p = true;
2108 attrs.offset = 0;
2109 apply_bitpos = bitpos;
2112 /* If this is a reference based on a partitioned decl replace the
2113 base with a MEM_REF of the pointer representative we created
2114 during stack slot partitioning. */
2115 if (attrs.expr
2116 && VAR_P (base)
2117 && ! is_global_var (base)
2118 && cfun->gimple_df->decls_to_pointers != NULL)
2120 tree *namep = cfun->gimple_df->decls_to_pointers->get (base);
2121 if (namep)
2123 attrs.expr = unshare_expr (attrs.expr);
2124 tree *orig_base = &attrs.expr;
2125 while (handled_component_p (*orig_base))
2126 orig_base = &TREE_OPERAND (*orig_base, 0);
2127 tree aptrt = reference_alias_ptr_type (*orig_base);
2128 *orig_base = build2 (MEM_REF, TREE_TYPE (*orig_base), *namep,
2129 build_int_cst (aptrt, 0));
2133 /* Compute the alignment. */
2134 unsigned int obj_align;
2135 unsigned HOST_WIDE_INT obj_bitpos;
2136 get_object_alignment_1 (t, &obj_align, &obj_bitpos);
2137 unsigned int diff_align = known_alignment (obj_bitpos - bitpos);
2138 if (diff_align != 0)
2139 obj_align = MIN (obj_align, diff_align);
2140 attrs.align = MAX (attrs.align, obj_align);
2143 poly_uint64 const_size;
2144 if (poly_int_tree_p (new_size, &const_size))
2146 attrs.size_known_p = true;
2147 attrs.size = const_size;
2150 /* If we modified OFFSET based on T, then subtract the outstanding
2151 bit position offset. Similarly, increase the size of the accessed
2152 object to contain the negative offset. */
2153 if (maybe_ne (apply_bitpos, 0))
2155 gcc_assert (attrs.offset_known_p);
2156 poly_int64 bytepos = bits_to_bytes_round_down (apply_bitpos);
2157 attrs.offset -= bytepos;
2158 if (attrs.size_known_p)
2159 attrs.size += bytepos;
2162 /* Now set the attributes we computed above. */
2163 attrs.addrspace = as;
2164 set_mem_attrs (ref, &attrs);
2167 void
2168 set_mem_attributes (rtx ref, tree t, int objectp)
2170 set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
2173 /* Set the alias set of MEM to SET. */
2175 void
2176 set_mem_alias_set (rtx mem, alias_set_type set)
2178 /* If the new and old alias sets don't conflict, something is wrong. */
2179 gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
2180 mem_attrs attrs (*get_mem_attrs (mem));
2181 attrs.alias = set;
2182 set_mem_attrs (mem, &attrs);
2185 /* Set the address space of MEM to ADDRSPACE (target-defined). */
2187 void
2188 set_mem_addr_space (rtx mem, addr_space_t addrspace)
2190 mem_attrs attrs (*get_mem_attrs (mem));
2191 attrs.addrspace = addrspace;
2192 set_mem_attrs (mem, &attrs);
2195 /* Set the alignment of MEM to ALIGN bits. */
2197 void
2198 set_mem_align (rtx mem, unsigned int align)
2200 mem_attrs attrs (*get_mem_attrs (mem));
2201 attrs.align = align;
2202 set_mem_attrs (mem, &attrs);
2205 /* Set the expr for MEM to EXPR. */
2207 void
2208 set_mem_expr (rtx mem, tree expr)
2210 mem_attrs attrs (*get_mem_attrs (mem));
2211 attrs.expr = expr;
2212 set_mem_attrs (mem, &attrs);
2215 /* Set the offset of MEM to OFFSET. */
2217 void
2218 set_mem_offset (rtx mem, poly_int64 offset)
2220 mem_attrs attrs (*get_mem_attrs (mem));
2221 attrs.offset_known_p = true;
2222 attrs.offset = offset;
2223 set_mem_attrs (mem, &attrs);
2226 /* Clear the offset of MEM. */
2228 void
2229 clear_mem_offset (rtx mem)
2231 mem_attrs attrs (*get_mem_attrs (mem));
2232 attrs.offset_known_p = false;
2233 set_mem_attrs (mem, &attrs);
2236 /* Set the size of MEM to SIZE. */
2238 void
2239 set_mem_size (rtx mem, poly_int64 size)
2241 mem_attrs attrs (*get_mem_attrs (mem));
2242 attrs.size_known_p = true;
2243 attrs.size = size;
2244 set_mem_attrs (mem, &attrs);
2247 /* Clear the size of MEM. */
2249 void
2250 clear_mem_size (rtx mem)
2252 mem_attrs attrs (*get_mem_attrs (mem));
2253 attrs.size_known_p = false;
2254 set_mem_attrs (mem, &attrs);
2257 /* Return a memory reference like MEMREF, but with its mode changed to MODE
2258 and its address changed to ADDR. (VOIDmode means don't change the mode.
2259 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
2260 returned memory location is required to be valid. INPLACE is true if any
2261 changes can be made directly to MEMREF or false if MEMREF must be treated
2262 as immutable.
2264 The memory attributes are not changed. */
2266 static rtx
2267 change_address_1 (rtx memref, machine_mode mode, rtx addr, int validate,
2268 bool inplace)
2270 addr_space_t as;
2271 rtx new_rtx;
2273 gcc_assert (MEM_P (memref));
2274 as = MEM_ADDR_SPACE (memref);
2275 if (mode == VOIDmode)
2276 mode = GET_MODE (memref);
2277 if (addr == 0)
2278 addr = XEXP (memref, 0);
2279 if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
2280 && (!validate || memory_address_addr_space_p (mode, addr, as)))
2281 return memref;
2283 /* Don't validate the address for LRA. LRA can make the address valid
2284 by itself in the most efficient way. */
2285 if (validate && !lra_in_progress)
2287 if (reload_in_progress || reload_completed)
2288 gcc_assert (memory_address_addr_space_p (mode, addr, as));
2289 else
2290 addr = memory_address_addr_space (mode, addr, as);
2293 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
2294 return memref;
2296 if (inplace)
2298 XEXP (memref, 0) = addr;
2299 return memref;
2302 new_rtx = gen_rtx_MEM (mode, addr);
2303 MEM_COPY_ATTRIBUTES (new_rtx, memref);
2304 return new_rtx;
2307 /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
2308 way we are changing MEMREF, so we only preserve the alias set. */
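/* Illustrative sketch: to access the same memory as MEMREF in BLKmode
   through a freshly loaded address register (ADDR is a placeholder):

     rtx blk = change_address (memref, BLKmode, force_reg (Pmode, addr));

   as noted above, only the alias set of MEMREF is preserved.  */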
2311 change_address (rtx memref, machine_mode mode, rtx addr)
2313 rtx new_rtx = change_address_1 (memref, mode, addr, 1, false);
2314 machine_mode mmode = GET_MODE (new_rtx);
2315 class mem_attrs *defattrs;
2317 mem_attrs attrs (*get_mem_attrs (memref));
2318 defattrs = mode_mem_attrs[(int) mmode];
2319 attrs.expr = NULL_TREE;
2320 attrs.offset_known_p = false;
2321 attrs.size_known_p = defattrs->size_known_p;
2322 attrs.size = defattrs->size;
2323 attrs.align = defattrs->align;
2325 /* If there are no changes, just return the original memory reference. */
2326 if (new_rtx == memref)
2328 if (mem_attrs_eq_p (get_mem_attrs (memref), &attrs))
2329 return new_rtx;
2331 new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
2332 MEM_COPY_ATTRIBUTES (new_rtx, memref);
2335 set_mem_attrs (new_rtx, &attrs);
2336 return new_rtx;
2339 /* Return a memory reference like MEMREF, but with its mode changed
2340 to MODE and its address offset by OFFSET bytes. If VALIDATE is
2341 nonzero, the memory address is forced to be valid.
2342 If ADJUST_ADDRESS is zero, OFFSET is only used to update MEM_ATTRS
2343 and the caller is responsible for adjusting MEMREF base register.
2344 If ADJUST_OBJECT is zero, the underlying object associated with the
2345 memory reference is left unchanged and the caller is responsible for
2346 dealing with it. Otherwise, if the new memory reference is outside
2347 the underlying object, even partially, then the object is dropped.
2348 SIZE, if nonzero, is the size of an access in cases where MODE
2349 has no inherent size. */
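/* Illustrative sketch: stepping a BLKmode reference forward by 8 bytes,
   with the address validated and the attributes adjusted accordingly:

     rtx next = adjust_address_1 (memref, VOIDmode, 8, 1, 1, 0, 0);

   (callers in GCC normally reach this through the adjust_address macros
   in emit-rtl.h).  */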
2352 adjust_address_1 (rtx memref, machine_mode mode, poly_int64 offset,
2353 int validate, int adjust_address, int adjust_object,
2354 poly_int64 size)
2356 rtx addr = XEXP (memref, 0);
2357 rtx new_rtx;
2358 scalar_int_mode address_mode;
2359 class mem_attrs attrs (*get_mem_attrs (memref)), *defattrs;
2360 unsigned HOST_WIDE_INT max_align;
2361 #ifdef POINTERS_EXTEND_UNSIGNED
2362 scalar_int_mode pointer_mode
2363 = targetm.addr_space.pointer_mode (attrs.addrspace);
2364 #endif
2366 /* VOIDmode means no mode change for change_address_1. */
2367 if (mode == VOIDmode)
2368 mode = GET_MODE (memref);
2370 /* Take the size of non-BLKmode accesses from the mode. */
2371 defattrs = mode_mem_attrs[(int) mode];
2372 if (defattrs->size_known_p)
2373 size = defattrs->size;
2375 /* If there are no changes, just return the original memory reference. */
2376 if (mode == GET_MODE (memref)
2377 && known_eq (offset, 0)
2378 && (known_eq (size, 0)
2379 || (attrs.size_known_p && known_eq (attrs.size, size)))
2380 && (!validate || memory_address_addr_space_p (mode, addr,
2381 attrs.addrspace)))
2382 return memref;
2384 /* ??? Prefer to create garbage instead of creating shared rtl.
2385 This may happen even if offset is nonzero -- consider
2386 (plus (plus reg reg) const_int) -- so do this always. */
2387 addr = copy_rtx (addr);
2389 /* Convert a possibly large offset to a signed value within the
2390 range of the target address space. */
2391 address_mode = get_address_mode (memref);
2392 offset = trunc_int_for_mode (offset, address_mode);
2394 if (adjust_address)
2396 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
2397 object, we can merge it into the LO_SUM. */
2398 if (GET_MODE (memref) != BLKmode
2399 && GET_CODE (addr) == LO_SUM
2400 && known_in_range_p (offset,
2401 0, (GET_MODE_ALIGNMENT (GET_MODE (memref))
2402 / BITS_PER_UNIT)))
2403 addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
2404 plus_constant (address_mode,
2405 XEXP (addr, 1), offset));
2406 #ifdef POINTERS_EXTEND_UNSIGNED
2407 /* If MEMREF is a ZERO_EXTEND from pointer_mode and the offset is valid
2408 in that mode, we merge it into the ZERO_EXTEND. We take advantage of
2409 the fact that pointers are not allowed to overflow. */
2410 else if (POINTERS_EXTEND_UNSIGNED > 0
2411 && GET_CODE (addr) == ZERO_EXTEND
2412 && GET_MODE (XEXP (addr, 0)) == pointer_mode
2413 && known_eq (trunc_int_for_mode (offset, pointer_mode), offset))
2414 addr = gen_rtx_ZERO_EXTEND (address_mode,
2415 plus_constant (pointer_mode,
2416 XEXP (addr, 0), offset));
2417 #endif
2418 else
2419 addr = plus_constant (address_mode, addr, offset);
2422 new_rtx = change_address_1 (memref, mode, addr, validate, false);
2424 /* If the address is a REG, change_address_1 rightfully returns memref,
2425 but this would destroy memref's MEM_ATTRS. */
2426 if (new_rtx == memref && maybe_ne (offset, 0))
2427 new_rtx = copy_rtx (new_rtx);
2429 /* Conservatively drop the object if we don't know where we start from. */
2430 if (adjust_object && (!attrs.offset_known_p || !attrs.size_known_p))
2432 attrs.expr = NULL_TREE;
2433 attrs.alias = 0;
2436 /* Compute the new values of the memory attributes due to this adjustment.
2437 We add the offsets and update the alignment. */
2438 if (attrs.offset_known_p)
2440 attrs.offset += offset;
2442 /* Drop the object if the new left end is not within its bounds. */
2443 if (adjust_object && maybe_lt (attrs.offset, 0))
2445 attrs.expr = NULL_TREE;
2446 attrs.alias = 0;
2450 /* Compute the new alignment by taking the MIN of the alignment and the
2451 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
2452 is zero. */
2453 if (maybe_ne (offset, 0))
2455 max_align = known_alignment (offset) * BITS_PER_UNIT;
2456 attrs.align = MIN (attrs.align, max_align);
2459 if (maybe_ne (size, 0))
2461 /* Drop the object if the new right end is not within its bounds. */
2462 if (adjust_object && maybe_gt (offset + size, attrs.size))
2464 attrs.expr = NULL_TREE;
2465 attrs.alias = 0;
2467 attrs.size_known_p = true;
2468 attrs.size = size;
2470 else if (attrs.size_known_p)
2472 gcc_assert (!adjust_object);
2473 attrs.size -= offset;
2474 /* ??? The store_by_pieces machinery generates negative sizes,
2475 so don't assert for that here. */
2478 set_mem_attrs (new_rtx, &attrs);
2480 return new_rtx;
2483 /* Return a memory reference like MEMREF, but with its mode changed
2484 to MODE and its address changed to ADDR, which is assumed to be
2485 MEMREF offset by OFFSET bytes. If VALIDATE is
2486 nonzero, the memory address is forced to be valid. */
2489 adjust_automodify_address_1 (rtx memref, machine_mode mode, rtx addr,
2490 poly_int64 offset, int validate)
2492 memref = change_address_1 (memref, VOIDmode, addr, validate, false);
2493 return adjust_address_1 (memref, mode, offset, validate, 0, 0, 0);
2496 /* Return a memory reference like MEMREF, but whose address is changed by
2497 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2498 known to be in OFFSET (possibly 1). */
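/* Illustrative sketch: adding a register index IDX that is known to be a
   multiple of 4 to the address of MEMREF (IDX is a placeholder):

     rtx elt = offset_address (memref, idx, 4);

   the result keeps at most 4-byte alignment and its offset is dropped, as
   the code below explains.  */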
2501 offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
2503 rtx new_rtx, addr = XEXP (memref, 0);
2504 machine_mode address_mode;
2505 class mem_attrs *defattrs;
2507 mem_attrs attrs (*get_mem_attrs (memref));
2508 address_mode = get_address_mode (memref);
2509 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2511 /* At this point we don't know _why_ the address is invalid. It
2512 could have secondary memory references, multiplies or anything.
2514 However, if we did go and rearrange things, we can wind up not
2515 being able to recognize the magic around pic_offset_table_rtx.
2516 This stuff is fragile, and is yet another example of why it is
2517 bad to expose PIC machinery too early. */
2518 if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx,
2519 attrs.addrspace)
2520 && GET_CODE (addr) == PLUS
2521 && XEXP (addr, 0) == pic_offset_table_rtx)
2523 addr = force_reg (GET_MODE (addr), addr);
2524 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2527 update_temp_slot_address (XEXP (memref, 0), new_rtx);
2528 new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1, false);
2530 /* If there are no changes, just return the original memory reference. */
2531 if (new_rtx == memref)
2532 return new_rtx;
2534 /* Update the alignment to reflect the offset. Reset the offset, which
2535 we don't know. */
2536 defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
2537 attrs.offset_known_p = false;
2538 attrs.size_known_p = defattrs->size_known_p;
2539 attrs.size = defattrs->size;
2540 attrs.align = MIN (attrs.align, pow2 * BITS_PER_UNIT);
2541 set_mem_attrs (new_rtx, &attrs);
2542 return new_rtx;
2545 /* Return a memory reference like MEMREF, but with its address changed to
2546 ADDR. The caller is asserting that the actual piece of memory pointed
2547 to is the same, just the form of the address is being changed, such as
2548 by putting something into a register. INPLACE is true if any changes
2549 can be made directly to MEMREF or false if MEMREF must be treated as
2550 immutable. */
2553 replace_equiv_address (rtx memref, rtx addr, bool inplace)
2555 /* change_address_1 copies the memory attribute structure without change
2556 and that's exactly what we want here. */
2557 update_temp_slot_address (XEXP (memref, 0), addr);
2558 return change_address_1 (memref, VOIDmode, addr, 1, inplace);
2561 /* Likewise, but the reference is not required to be valid. */
2564 replace_equiv_address_nv (rtx memref, rtx addr, bool inplace)
2566 return change_address_1 (memref, VOIDmode, addr, 0, inplace);
2569 /* Return a memory reference like MEMREF, but with its mode widened to
2570 MODE and offset by OFFSET. This would be used by targets that e.g.
2571 cannot issue QImode memory operations and have to use SImode memory
2572 operations plus masking logic. */
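/* Illustrative sketch: a target without byte loads can widen a QImode
   reference to word size at the same address:

     rtx wide = widen_memory_access (memref, SImode, 0);

   and extract the wanted byte with shifts and masks; the widened MEM gets
   alias set 0 because it may now overlap neighbouring objects.  */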
2575 widen_memory_access (rtx memref, machine_mode mode, poly_int64 offset)
2577 rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1, 0, 0);
2578 poly_uint64 size = GET_MODE_SIZE (mode);
2580 /* If there are no changes, just return the original memory reference. */
2581 if (new_rtx == memref)
2582 return new_rtx;
2584 mem_attrs attrs (*get_mem_attrs (new_rtx));
2586 /* If we don't know what offset we were at within the expression, then
2587 we can't know if we've overstepped the bounds. */
2588 if (! attrs.offset_known_p)
2589 attrs.expr = NULL_TREE;
2591 while (attrs.expr)
2593 if (TREE_CODE (attrs.expr) == COMPONENT_REF)
2595 tree field = TREE_OPERAND (attrs.expr, 1);
2596 tree offset = component_ref_field_offset (attrs.expr);
2598 if (! DECL_SIZE_UNIT (field))
2600 attrs.expr = NULL_TREE;
2601 break;
2604 /* Is the field at least as large as the access? If so, ok,
2605 otherwise strip back to the containing structure. */
2606 if (poly_int_tree_p (DECL_SIZE_UNIT (field))
2607 && known_ge (wi::to_poly_offset (DECL_SIZE_UNIT (field)), size)
2608 && known_ge (attrs.offset, 0))
2609 break;
2611 poly_uint64 suboffset;
2612 if (!poly_int_tree_p (offset, &suboffset))
2614 attrs.expr = NULL_TREE;
2615 break;
2618 attrs.expr = TREE_OPERAND (attrs.expr, 0);
2619 attrs.offset += suboffset;
2620 attrs.offset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
2621 / BITS_PER_UNIT);
2623 /* Similarly for the decl. */
2624 else if (DECL_P (attrs.expr)
2625 && DECL_SIZE_UNIT (attrs.expr)
2626 && poly_int_tree_p (DECL_SIZE_UNIT (attrs.expr))
2627 && known_ge (wi::to_poly_offset (DECL_SIZE_UNIT (attrs.expr)),
2628 size)
2629 && known_ge (attrs.offset, 0))
2630 break;
2631 else
2633 /* The widened memory access overflows the expression, which means
2634 that it could alias another expression. Zap it. */
2635 attrs.expr = NULL_TREE;
2636 break;
2640 if (! attrs.expr)
2641 attrs.offset_known_p = false;
2643 /* The widened memory may alias other stuff, so zap the alias set. */
2644 /* ??? Maybe use get_alias_set on any remaining expression. */
2645 attrs.alias = 0;
2646 attrs.size_known_p = true;
2647 attrs.size = size;
2648 set_mem_attrs (new_rtx, &attrs);
2649 return new_rtx;
2652 /* A fake decl that is used as the MEM_EXPR of spill slots. */
2653 static GTY(()) tree spill_slot_decl;
2655 tree
2656 get_spill_slot_decl (bool force_build_p)
2658 tree d = spill_slot_decl;
2659 rtx rd;
2661 if (d || !force_build_p)
2662 return d;
2664 d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
2665 VAR_DECL, get_identifier ("%sfp"), void_type_node);
2666 DECL_ARTIFICIAL (d) = 1;
2667 DECL_IGNORED_P (d) = 1;
2668 TREE_USED (d) = 1;
2669 spill_slot_decl = d;
2671 rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
2672 MEM_NOTRAP_P (rd) = 1;
2673 mem_attrs attrs (*mode_mem_attrs[(int) BLKmode]);
2674 attrs.alias = new_alias_set ();
2675 attrs.expr = d;
2676 set_mem_attrs (rd, &attrs);
2677 SET_DECL_RTL (d, rd);
2679 return d;
2682 /* Given MEM, a result from assign_stack_local, fill in the memory
2683 attributes as appropriate for a register allocator spill slot.
2684 These slots are not aliasable by other memory. We arrange for
2685 them all to use a single MEM_EXPR, so that the aliasing code can
2686 work properly in the case of shared spill slots. */
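/* Illustrative sketch (MODE, SIZE and ALIGN are placeholders): a slot
   obtained from assign_stack_local is tagged with

     rtx slot = assign_stack_local (mode, size, align);
     set_mem_attrs_for_spill (slot);

   so that every spill slot shares the single MEM_EXPR built above.  */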
2688 void
2689 set_mem_attrs_for_spill (rtx mem)
2691 rtx addr;
2693 mem_attrs attrs (*get_mem_attrs (mem));
2694 attrs.expr = get_spill_slot_decl (true);
2695 attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr));
2696 attrs.addrspace = ADDR_SPACE_GENERIC;
2698 /* We expect the incoming memory to be of the form:
2699 (mem:MODE (plus (reg sfp) (const_int offset)))
2700 with perhaps the plus missing for offset = 0. */
2701 addr = XEXP (mem, 0);
2702 attrs.offset_known_p = true;
2703 strip_offset (addr, &attrs.offset);
2705 set_mem_attrs (mem, &attrs);
2706 MEM_NOTRAP_P (mem) = 1;
2709 /* Return a newly created CODE_LABEL rtx with a unique label number. */
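/* Illustrative sketch: a fresh label is typically created here and later
   placed into the insn stream:

     rtx_code_label *lab = gen_label_rtx ();
     ...
     emit_label (lab);

   (emit_label is defined later in this file.)  */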
2711 rtx_code_label *
2712 gen_label_rtx (void)
2714 return as_a <rtx_code_label *> (
2715 gen_rtx_CODE_LABEL (VOIDmode, NULL_RTX, NULL_RTX,
2716 NULL, label_num++, NULL));
2719 /* For procedure integration. */
2721 /* Install new pointers to the first and last insns in the chain.
2722 Also, set cur_insn_uid to one higher than the last in use.
2723 Used for an inline-procedure after copying the insn chain. */
2725 void
2726 set_new_first_and_last_insn (rtx_insn *first, rtx_insn *last)
2728 rtx_insn *insn;
2730 set_first_insn (first);
2731 set_last_insn (last);
2732 cur_insn_uid = 0;
2734 if (param_min_nondebug_insn_uid || MAY_HAVE_DEBUG_INSNS)
2736 int debug_count = 0;
2738 cur_insn_uid = param_min_nondebug_insn_uid - 1;
2739 cur_debug_insn_uid = 0;
2741 for (insn = first; insn; insn = NEXT_INSN (insn))
2742 if (INSN_UID (insn) < param_min_nondebug_insn_uid)
2743 cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
2744 else
2746 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2747 if (DEBUG_INSN_P (insn))
2748 debug_count++;
2751 if (debug_count)
2752 cur_debug_insn_uid = param_min_nondebug_insn_uid + debug_count;
2753 else
2754 cur_debug_insn_uid++;
2756 else
2757 for (insn = first; insn; insn = NEXT_INSN (insn))
2758 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2760 cur_insn_uid++;
2763 /* Go through all the RTL insn bodies and copy any invalid shared
2764 structure. This routine should only be called once. */
2766 static void
2767 unshare_all_rtl_1 (rtx_insn *insn)
2769 /* Unshare just about everything else. */
2770 unshare_all_rtl_in_chain (insn);
2772 /* Make sure the addresses of stack slots found outside the insn chain
2773 (such as, in DECL_RTL of a variable) are not shared
2774 with the insn chain.
2776 This special care is necessary when the stack slot MEM does not
2777 actually appear in the insn chain. If it does appear, its address
2778 is unshared from all else at that point. */
2779 unsigned int i;
2780 rtx temp;
2781 FOR_EACH_VEC_SAFE_ELT (stack_slot_list, i, temp)
2782 (*stack_slot_list)[i] = copy_rtx_if_shared (temp);
2785 /* Go through all the RTL insn bodies and copy any invalid shared
2786 structure, again. This is a fairly expensive thing to do so it
2787 should be done sparingly. */
2789 void
2790 unshare_all_rtl_again (rtx_insn *insn)
2792 rtx_insn *p;
2793 tree decl;
2795 for (p = insn; p; p = NEXT_INSN (p))
2796 if (INSN_P (p))
2798 reset_used_flags (PATTERN (p));
2799 reset_used_flags (REG_NOTES (p));
2800 if (CALL_P (p))
2801 reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
2804 /* Make sure that virtual stack slots are not shared. */
2805 set_used_decls (DECL_INITIAL (cfun->decl));
2807 /* Make sure that virtual parameters are not shared. */
2808 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
2809 set_used_flags (DECL_RTL (decl));
2811 rtx temp;
2812 unsigned int i;
2813 FOR_EACH_VEC_SAFE_ELT (stack_slot_list, i, temp)
2814 reset_used_flags (temp);
2816 unshare_all_rtl_1 (insn);
2819 unsigned int
2820 unshare_all_rtl (void)
2822 unshare_all_rtl_1 (get_insns ());
2824 for (tree decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
2826 if (DECL_RTL_SET_P (decl))
2827 SET_DECL_RTL (decl, copy_rtx_if_shared (DECL_RTL (decl)));
2828 DECL_INCOMING_RTL (decl) = copy_rtx_if_shared (DECL_INCOMING_RTL (decl));
2831 return 0;
2835 /* Check that ORIG is not marked when it should not be and mark ORIG as in use.
2836 Recursively do the same for subexpressions. */
2838 static void
2839 verify_rtx_sharing (rtx orig, rtx insn)
2841 rtx x = orig;
2842 int i;
2843 enum rtx_code code;
2844 const char *format_ptr;
2846 if (x == 0)
2847 return;
2849 code = GET_CODE (x);
2851 /* These types may be freely shared. */
2853 switch (code)
2855 case REG:
2856 case DEBUG_EXPR:
2857 case VALUE:
2858 CASE_CONST_ANY:
2859 case SYMBOL_REF:
2860 case LABEL_REF:
2861 case CODE_LABEL:
2862 case PC:
2863 case RETURN:
2864 case SIMPLE_RETURN:
2865 case SCRATCH:
2866 /* SCRATCHes must be shared because they represent distinct values. */
2867 return;
2868 case CLOBBER:
2869 /* Share clobbers of hard registers, but do not share pseudo reg
2870 clobbers or clobbers of hard registers that originated as pseudos.
2871 This is needed to allow safe register renaming. */
2872 if (REG_P (XEXP (x, 0))
2873 && HARD_REGISTER_NUM_P (REGNO (XEXP (x, 0)))
2874 && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (x, 0))))
2875 return;
2876 break;
2878 case CONST:
2879 if (shared_const_p (orig))
2880 return;
2881 break;
2883 case MEM:
2884 /* A MEM is allowed to be shared if its address is constant. */
2885 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2886 || reload_completed || reload_in_progress)
2887 return;
2889 break;
2891 default:
2892 break;
2895 /* This rtx may not be shared. If it has already been seen,
2896 replace it with a copy of itself. */
2897 if (flag_checking && RTX_FLAG (x, used))
2899 error ("invalid rtl sharing found in the insn");
2900 debug_rtx (insn);
2901 error ("shared rtx");
2902 debug_rtx (x);
2903 internal_error ("internal consistency failure");
2905 gcc_assert (!RTX_FLAG (x, used));
2907 RTX_FLAG (x, used) = 1;
2909 /* Now scan the subexpressions recursively. */
2911 format_ptr = GET_RTX_FORMAT (code);
2913 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2915 switch (*format_ptr++)
2917 case 'e':
2918 verify_rtx_sharing (XEXP (x, i), insn);
2919 break;
2921 case 'E':
2922 if (XVEC (x, i) != NULL)
2924 int j;
2925 int len = XVECLEN (x, i);
2927 for (j = 0; j < len; j++)
2929 /* We allow sharing of ASM_OPERANDS inside a single
2930 instruction. */
2931 if (j && GET_CODE (XVECEXP (x, i, j)) == SET
2932 && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2933 == ASM_OPERANDS))
2934 verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2935 else
2936 verify_rtx_sharing (XVECEXP (x, i, j), insn);
2939 break;
2942 return;
2945 /* Reset used-flags for INSN. */
2947 static void
2948 reset_insn_used_flags (rtx insn)
2950 gcc_assert (INSN_P (insn));
2951 reset_used_flags (PATTERN (insn));
2952 reset_used_flags (REG_NOTES (insn));
2953 if (CALL_P (insn))
2954 reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn));
2957 /* Go through all the RTL insn bodies and clear all the USED bits. */
2959 static void
2960 reset_all_used_flags (void)
2962 rtx_insn *p;
2964 for (p = get_insns (); p; p = NEXT_INSN (p))
2965 if (INSN_P (p))
2967 rtx pat = PATTERN (p);
2968 if (GET_CODE (pat) != SEQUENCE)
2969 reset_insn_used_flags (p);
2970 else
2972 gcc_assert (REG_NOTES (p) == NULL);
2973 for (int i = 0; i < XVECLEN (pat, 0); i++)
2975 rtx insn = XVECEXP (pat, 0, i);
2976 if (INSN_P (insn))
2977 reset_insn_used_flags (insn);
2983 /* Verify sharing in INSN. */
2985 static void
2986 verify_insn_sharing (rtx insn)
2988 gcc_assert (INSN_P (insn));
2989 verify_rtx_sharing (PATTERN (insn), insn);
2990 verify_rtx_sharing (REG_NOTES (insn), insn);
2991 if (CALL_P (insn))
2992 verify_rtx_sharing (CALL_INSN_FUNCTION_USAGE (insn), insn);
2995 /* Go through all the RTL insn bodies and check that there is no unexpected
2996 sharing in between the subexpressions. */
2998 DEBUG_FUNCTION void
2999 verify_rtl_sharing (void)
3001 rtx_insn *p;
3003 timevar_push (TV_VERIFY_RTL_SHARING);
3005 reset_all_used_flags ();
3007 for (p = get_insns (); p; p = NEXT_INSN (p))
3008 if (INSN_P (p))
3010 rtx pat = PATTERN (p);
3011 if (GET_CODE (pat) != SEQUENCE)
3012 verify_insn_sharing (p);
3013 else
3014 for (int i = 0; i < XVECLEN (pat, 0); i++)
3016 rtx insn = XVECEXP (pat, 0, i);
3017 if (INSN_P (insn))
3018 verify_insn_sharing (insn);
3022 reset_all_used_flags ();
3024 timevar_pop (TV_VERIFY_RTL_SHARING);
3027 /* Go through all the RTL insn bodies and copy any invalid shared structure.
3028 Assumes the mark bits are cleared at entry. */
3030 void
3031 unshare_all_rtl_in_chain (rtx_insn *insn)
3033 for (; insn; insn = NEXT_INSN (insn))
3034 if (INSN_P (insn))
3036 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
3037 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
3038 if (CALL_P (insn))
3039 CALL_INSN_FUNCTION_USAGE (insn)
3040 = copy_rtx_if_shared (CALL_INSN_FUNCTION_USAGE (insn));
3044 /* Go through all virtual stack slots of a function and mark them as
3045 shared. We never replace the DECL_RTLs themselves with a copy,
3046 but expressions mentioned in a DECL_RTL cannot be shared with
3047 expressions in the instruction stream.
3049 Note that reload may convert pseudo registers into memories in-place.
3050 Pseudo registers are always shared, but MEMs never are. Thus if we
3051 reset the used flags on MEMs in the instruction stream, we must set
3052 them again on MEMs that appear in DECL_RTLs. */
3054 static void
3055 set_used_decls (tree blk)
3057 tree t;
3059 /* Mark decls. */
3060 for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
3061 if (DECL_RTL_SET_P (t))
3062 set_used_flags (DECL_RTL (t));
3064 /* Now process sub-blocks. */
3065 for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
3066 set_used_decls (t);
3069 /* Mark ORIG as in use, and return a copy of it if it was already in use.
3070 Recursively does the same for subexpressions. Uses
3071 copy_rtx_if_shared_1 to reduce stack space. */
3074 copy_rtx_if_shared (rtx orig)
3076 copy_rtx_if_shared_1 (&orig);
3077 return orig;
3080 /* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
3081 use. Recursively does the same for subexpressions. */
3083 static void
3084 copy_rtx_if_shared_1 (rtx *orig1)
3086 rtx x;
3087 int i;
3088 enum rtx_code code;
3089 rtx *last_ptr;
3090 const char *format_ptr;
3091 int copied = 0;
3092 int length;
3094 /* Repeat is used to turn tail-recursion into iteration. */
3095 repeat:
3096 x = *orig1;
3098 if (x == 0)
3099 return;
3101 code = GET_CODE (x);
3103 /* These types may be freely shared. */
3105 switch (code)
3107 case REG:
3108 case DEBUG_EXPR:
3109 case VALUE:
3110 CASE_CONST_ANY:
3111 case SYMBOL_REF:
3112 case LABEL_REF:
3113 case CODE_LABEL:
3114 case PC:
3115 case RETURN:
3116 case SIMPLE_RETURN:
3117 case SCRATCH:
3118 /* SCRATCHes must be shared because they represent distinct values. */
3119 return;
3120 case CLOBBER:
3121 /* Share clobbers of hard registers, but do not share pseudo reg
3122 clobbers or clobbers of hard registers that originated as pseudos.
3123 This is needed to allow safe register renaming. */
3124 if (REG_P (XEXP (x, 0))
3125 && HARD_REGISTER_NUM_P (REGNO (XEXP (x, 0)))
3126 && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (x, 0))))
3127 return;
3128 break;
3130 case CONST:
3131 if (shared_const_p (x))
3132 return;
3133 break;
3135 case DEBUG_INSN:
3136 case INSN:
3137 case JUMP_INSN:
3138 case CALL_INSN:
3139 case NOTE:
3140 case BARRIER:
3141 /* The chain of insns is not being copied. */
3142 return;
3144 default:
3145 break;
3148 /* This rtx may not be shared. If it has already been seen,
3149 replace it with a copy of itself. */
3151 if (RTX_FLAG (x, used))
3153 x = shallow_copy_rtx (x);
3154 copied = 1;
3156 RTX_FLAG (x, used) = 1;
3158 /* Now scan the subexpressions recursively.
3159 We can store any replaced subexpressions directly into X
3160 since we know X is not shared! Any vectors in X
3161 must be copied if X was copied. */
3163 format_ptr = GET_RTX_FORMAT (code);
3164 length = GET_RTX_LENGTH (code);
3165 last_ptr = NULL;
3167 for (i = 0; i < length; i++)
3169 switch (*format_ptr++)
3171 case 'e':
3172 if (last_ptr)
3173 copy_rtx_if_shared_1 (last_ptr);
3174 last_ptr = &XEXP (x, i);
3175 break;
3177 case 'E':
3178 if (XVEC (x, i) != NULL)
3180 int j;
3181 int len = XVECLEN (x, i);
3183 /* Copy the vector iff we copied the rtx and the length
3184 is nonzero. */
3185 if (copied && len > 0)
3186 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
3188 /* Call recursively on all inside the vector. */
3189 for (j = 0; j < len; j++)
3191 if (last_ptr)
3192 copy_rtx_if_shared_1 (last_ptr);
3193 last_ptr = &XVECEXP (x, i, j);
3196 break;
3199 *orig1 = x;
3200 if (last_ptr)
3202 orig1 = last_ptr;
3203 goto repeat;
3205 return;
3208 /* Set the USED bit in X and its non-shareable subparts to FLAG. */
3210 static void
3211 mark_used_flags (rtx x, int flag)
3213 int i, j;
3214 enum rtx_code code;
3215 const char *format_ptr;
3216 int length;
3218 /* Repeat is used to turn tail-recursion into iteration. */
3219 repeat:
3220 if (x == 0)
3221 return;
3223 code = GET_CODE (x);
3225 /* These types may be freely shared so we needn't do any resetting
3226 for them. */
3228 switch (code)
3230 case REG:
3231 case DEBUG_EXPR:
3232 case VALUE:
3233 CASE_CONST_ANY:
3234 case SYMBOL_REF:
3235 case CODE_LABEL:
3236 case PC:
3237 case RETURN:
3238 case SIMPLE_RETURN:
3239 return;
3241 case DEBUG_INSN:
3242 case INSN:
3243 case JUMP_INSN:
3244 case CALL_INSN:
3245 case NOTE:
3246 case LABEL_REF:
3247 case BARRIER:
3248 /* The chain of insns is not being copied. */
3249 return;
3251 default:
3252 break;
3255 RTX_FLAG (x, used) = flag;
3257 format_ptr = GET_RTX_FORMAT (code);
3258 length = GET_RTX_LENGTH (code);
3260 for (i = 0; i < length; i++)
3262 switch (*format_ptr++)
3264 case 'e':
3265 if (i == length-1)
3267 x = XEXP (x, i);
3268 goto repeat;
3270 mark_used_flags (XEXP (x, i), flag);
3271 break;
3273 case 'E':
3274 for (j = 0; j < XVECLEN (x, i); j++)
3275 mark_used_flags (XVECEXP (x, i, j), flag);
3276 break;
3281 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
3282 to look for shared sub-parts. */
3284 void
3285 reset_used_flags (rtx x)
3287 mark_used_flags (x, 0);
3290 /* Set all the USED bits in X to allow copy_rtx_if_shared to be used
3291 to look for shared sub-parts. */
3293 void
3294 set_used_flags (rtx x)
3296 mark_used_flags (x, 1);
3299 /* Copy X if necessary so that it won't be altered by changes in OTHER.
3300 Return X or the rtx for the pseudo reg the value of X was copied into.
3301 OTHER must be valid as a SET_DEST. */
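/* Illustrative sketch: before storing into DEST while X is still needed,
   a caller can write

     x = make_safe_from (x, dest);

   so that X is copied into a fresh pseudo whenever a store to DEST could
   alter it.  */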
3304 make_safe_from (rtx x, rtx other)
3306 while (1)
3307 switch (GET_CODE (other))
3309 case SUBREG:
3310 other = SUBREG_REG (other);
3311 break;
3312 case STRICT_LOW_PART:
3313 case SIGN_EXTEND:
3314 case ZERO_EXTEND:
3315 other = XEXP (other, 0);
3316 break;
3317 default:
3318 goto done;
3320 done:
3321 if ((MEM_P (other)
3322 && ! CONSTANT_P (x)
3323 && !REG_P (x)
3324 && GET_CODE (x) != SUBREG)
3325 || (REG_P (other)
3326 && (REGNO (other) < FIRST_PSEUDO_REGISTER
3327 || reg_mentioned_p (other, x))))
3329 rtx temp = gen_reg_rtx (GET_MODE (x));
3330 emit_move_insn (temp, x);
3331 return temp;
3333 return x;
3336 /* Emission of insns (adding them to the doubly-linked list). */
3338 /* Return the last insn emitted, even if it is in a sequence now pushed. */
3340 rtx_insn *
3341 get_last_insn_anywhere (void)
3343 struct sequence_stack *seq;
3344 for (seq = get_current_sequence (); seq; seq = seq->next)
3345 if (seq->last != 0)
3346 return seq->last;
3347 return 0;
3350 /* Return the first nonnote insn emitted in current sequence or current
3351 function. This routine looks inside SEQUENCEs. */
3353 rtx_insn *
3354 get_first_nonnote_insn (void)
3356 rtx_insn *insn = get_insns ();
3358 if (insn)
3360 if (NOTE_P (insn))
3361 for (insn = next_insn (insn);
3362 insn && NOTE_P (insn);
3363 insn = next_insn (insn))
3364 continue;
3365 else
3367 if (NONJUMP_INSN_P (insn)
3368 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3369 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3373 return insn;
3376 /* Return the last nonnote insn emitted in current sequence or current
3377 function. This routine looks inside SEQUENCEs. */
3379 rtx_insn *
3380 get_last_nonnote_insn (void)
3382 rtx_insn *insn = get_last_insn ();
3384 if (insn)
3386 if (NOTE_P (insn))
3387 for (insn = previous_insn (insn);
3388 insn && NOTE_P (insn);
3389 insn = previous_insn (insn))
3390 continue;
3391 else
3393 if (NONJUMP_INSN_P (insn))
3394 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
3395 insn = seq->insn (seq->len () - 1);
3399 return insn;
3402 /* Return the number of actual (non-debug) insns emitted in this
3403 function. */
3406 get_max_insn_count (void)
3408 int n = cur_insn_uid;
3410 /* The table size must be stable across -g, to avoid codegen
3411 differences due to debug insns, and not be affected by
3412 -fmin-insn-uid, to avoid excessive table size and to simplify
3413 debugging of -fcompare-debug failures. */
3414 if (cur_debug_insn_uid > param_min_nondebug_insn_uid)
3415 n -= cur_debug_insn_uid;
3416 else
3417 n -= param_min_nondebug_insn_uid;
3419 return n;
3423 /* Return the next insn. If it is a SEQUENCE, return the first insn
3424 of the sequence. */
3426 rtx_insn *
3427 next_insn (rtx_insn *insn)
3429 if (insn)
3431 insn = NEXT_INSN (insn);
3432 if (insn && NONJUMP_INSN_P (insn)
3433 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3434 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3437 return insn;
3440 /* Return the previous insn. If it is a SEQUENCE, return the last insn
3441 of the sequence. */
3443 rtx_insn *
3444 previous_insn (rtx_insn *insn)
3446 if (insn)
3448 insn = PREV_INSN (insn);
3449 if (insn && NONJUMP_INSN_P (insn))
3450 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
3451 insn = seq->insn (seq->len () - 1);
3454 return insn;
3457 /* Return the next insn after INSN that is not a NOTE. This routine does not
3458 look inside SEQUENCEs. */
3460 rtx_insn *
3461 next_nonnote_insn (rtx_insn *insn)
3463 while (insn)
3465 insn = NEXT_INSN (insn);
3466 if (insn == 0 || !NOTE_P (insn))
3467 break;
3470 return insn;
3473 /* Return the next insn after INSN that is not a DEBUG_INSN. This
3474 routine does not look inside SEQUENCEs. */
3476 rtx_insn *
3477 next_nondebug_insn (rtx_insn *insn)
3479 while (insn)
3481 insn = NEXT_INSN (insn);
3482 if (insn == 0 || !DEBUG_INSN_P (insn))
3483 break;
3486 return insn;
3489 /* Return the previous insn before INSN that is not a NOTE. This routine does
3490 not look inside SEQUENCEs. */
3492 rtx_insn *
3493 prev_nonnote_insn (rtx_insn *insn)
3495 while (insn)
3497 insn = PREV_INSN (insn);
3498 if (insn == 0 || !NOTE_P (insn))
3499 break;
3502 return insn;
3505 /* Return the previous insn before INSN that is not a DEBUG_INSN.
3506 This routine does not look inside SEQUENCEs. */
3508 rtx_insn *
3509 prev_nondebug_insn (rtx_insn *insn)
3511 while (insn)
3513 insn = PREV_INSN (insn);
3514 if (insn == 0 || !DEBUG_INSN_P (insn))
3515 break;
3518 return insn;
3521 /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
3522 This routine does not look inside SEQUENCEs. */
3524 rtx_insn *
3525 next_nonnote_nondebug_insn (rtx_insn *insn)
3527 while (insn)
3529 insn = NEXT_INSN (insn);
3530 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3531 break;
3534 return insn;
3537 /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN,
3538 but stop the search before we enter another basic block. This
3539 routine does not look inside SEQUENCEs. */
3541 rtx_insn *
3542 next_nonnote_nondebug_insn_bb (rtx_insn *insn)
3544 while (insn)
3546 insn = NEXT_INSN (insn);
3547 if (insn == 0)
3548 break;
3549 if (DEBUG_INSN_P (insn))
3550 continue;
3551 if (!NOTE_P (insn))
3552 break;
3553 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3554 return NULL;
3557 return insn;
3560 /* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
3561 This routine does not look inside SEQUENCEs. */
3563 rtx_insn *
3564 prev_nonnote_nondebug_insn (rtx_insn *insn)
3566 while (insn)
3568 insn = PREV_INSN (insn);
3569 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3570 break;
3573 return insn;
3576 /* Return the previous insn before INSN that is not a NOTE nor
3577 DEBUG_INSN, but stop the search before we enter another basic
3578 block. This routine does not look inside SEQUENCEs. */
3580 rtx_insn *
3581 prev_nonnote_nondebug_insn_bb (rtx_insn *insn)
3583 while (insn)
3585 insn = PREV_INSN (insn);
3586 if (insn == 0)
3587 break;
3588 if (DEBUG_INSN_P (insn))
3589 continue;
3590 if (!NOTE_P (insn))
3591 break;
3592 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3593 return NULL;
3596 return insn;
3599 /* Return the next INSN, CALL_INSN, JUMP_INSN or DEBUG_INSN after INSN;
3600 or 0, if there is none. This routine does not look inside
3601 SEQUENCEs. */
3603 rtx_insn *
3604 next_real_insn (rtx_insn *insn)
3606 while (insn)
3608 insn = NEXT_INSN (insn);
3609 if (insn == 0 || INSN_P (insn))
3610 break;
3613 return insn;
3616 /* Return the last INSN, CALL_INSN, JUMP_INSN or DEBUG_INSN before INSN;
3617 or 0, if there is none. This routine does not look inside
3618 SEQUENCEs. */
3620 rtx_insn *
3621 prev_real_insn (rtx_insn *insn)
3623 while (insn)
3625 insn = PREV_INSN (insn);
3626 if (insn == 0 || INSN_P (insn))
3627 break;
3630 return insn;
3633 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3634 or 0, if there is none. This routine does not look inside
3635 SEQUENCEs. */
3637 rtx_insn *
3638 next_real_nondebug_insn (rtx uncast_insn)
3640 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3642 while (insn)
3644 insn = NEXT_INSN (insn);
3645 if (insn == 0 || NONDEBUG_INSN_P (insn))
3646 break;
3649 return insn;
3652 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3653 or 0, if there is none. This routine does not look inside
3654 SEQUENCEs. */
3656 rtx_insn *
3657 prev_real_nondebug_insn (rtx_insn *insn)
3659 while (insn)
3661 insn = PREV_INSN (insn);
3662 if (insn == 0 || NONDEBUG_INSN_P (insn))
3663 break;
3666 return insn;
3669 /* Return the last CALL_INSN in the current list, or 0 if there is none.
3670 This routine does not look inside SEQUENCEs. */
3672 rtx_call_insn *
3673 last_call_insn (void)
3675 rtx_insn *insn;
3677 for (insn = get_last_insn ();
3678 insn && !CALL_P (insn);
3679 insn = PREV_INSN (insn))
3682 return safe_as_a <rtx_call_insn *> (insn);
3685 /* Find the next insn after INSN that really does something. This routine
3686 does not look inside SEQUENCEs. After reload this also skips over
3687 standalone USE and CLOBBER insns. */
3690 active_insn_p (const rtx_insn *insn)
3692 return (CALL_P (insn) || JUMP_P (insn)
3693 || JUMP_TABLE_DATA_P (insn) /* FIXME */
3694 || (NONJUMP_INSN_P (insn)
3695 && (! reload_completed
3696 || (GET_CODE (PATTERN (insn)) != USE
3697 && GET_CODE (PATTERN (insn)) != CLOBBER))));
3700 rtx_insn *
3701 next_active_insn (rtx_insn *insn)
3703 while (insn)
3705 insn = NEXT_INSN (insn);
3706 if (insn == 0 || active_insn_p (insn))
3707 break;
3710 return insn;
3713 /* Find the last insn before INSN that really does something. This routine
3714 does not look inside SEQUENCEs. After reload this also skips over
3715 standalone USE and CLOBBER insns. */
3717 rtx_insn *
3718 prev_active_insn (rtx_insn *insn)
3720 while (insn)
3722 insn = PREV_INSN (insn);
3723 if (insn == 0 || active_insn_p (insn))
3724 break;
3727 return insn;
3730 /* Find a RTX_AUTOINC class rtx which matches DATA. */
3732 static int
3733 find_auto_inc (const_rtx x, const_rtx reg)
3735 subrtx_iterator::array_type array;
3736 FOR_EACH_SUBRTX (iter, array, x, NONCONST)
3738 const_rtx x = *iter;
3739 if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC
3740 && rtx_equal_p (reg, XEXP (x, 0)))
3741 return true;
3743 return false;
3746 /* Increment the label uses for all labels present in rtx. */
3748 static void
3749 mark_label_nuses (rtx x)
3751 enum rtx_code code;
3752 int i, j;
3753 const char *fmt;
3755 code = GET_CODE (x);
3756 if (code == LABEL_REF && LABEL_P (label_ref_label (x)))
3757 LABEL_NUSES (label_ref_label (x))++;
3759 fmt = GET_RTX_FORMAT (code);
3760 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3762 if (fmt[i] == 'e')
3763 mark_label_nuses (XEXP (x, i));
3764 else if (fmt[i] == 'E')
3765 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3766 mark_label_nuses (XVECEXP (x, i, j));
3771 /* Try splitting insns that can be split for better scheduling.
3772 PAT is the pattern which might split.
3773 TRIAL is the insn providing PAT.
3774 LAST is nonzero if we should return the last insn of the sequence produced.
3776 If this routine succeeds in splitting, it returns the first or last
3777 replacement insn depending on the value of LAST. Otherwise, it
3778 returns TRIAL. If the insn to be returned can be split, it will be. */
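/* Illustrative sketch: a pass that wants INSN split as far as possible can
   simply do

     rtx_insn *last = try_split (PATTERN (insn), insn, 1);

   and when nothing could be split, LAST is just INSN again.  */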
3780 rtx_insn *
3781 try_split (rtx pat, rtx_insn *trial, int last)
3783 rtx_insn *before, *after;
3784 rtx note;
3785 rtx_insn *seq, *tem;
3786 profile_probability probability;
3787 rtx_insn *insn_last, *insn;
3788 int njumps = 0;
3789 rtx_insn *call_insn = NULL;
3791 if (any_condjump_p (trial)
3792 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3793 split_branch_probability
3794 = profile_probability::from_reg_br_prob_note (XINT (note, 0));
3795 else
3796 split_branch_probability = profile_probability::uninitialized ();
3798 probability = split_branch_probability;
3800 seq = split_insns (pat, trial);
3802 split_branch_probability = profile_probability::uninitialized ();
3804 if (!seq)
3805 return trial;
3807 int split_insn_count = 0;
3808 /* Avoid an infinite loop if any insn of the result matches
3809 the original pattern. */
3810 insn_last = seq;
3811 while (1)
3813 if (INSN_P (insn_last)
3814 && rtx_equal_p (PATTERN (insn_last), pat))
3815 return trial;
3816 split_insn_count++;
3817 if (!NEXT_INSN (insn_last))
3818 break;
3819 insn_last = NEXT_INSN (insn_last);
3822 /* We're not good at redistributing frame information if
3823 the split occurs before reload or if it results in more
3824 than one insn. */
3825 if (RTX_FRAME_RELATED_P (trial))
3827 if (!reload_completed || split_insn_count != 1)
3828 return trial;
3830 rtx_insn *new_insn = seq;
3831 rtx_insn *old_insn = trial;
3832 copy_frame_info_to_split_insn (old_insn, new_insn);
3835 /* We will be adding the new sequence to the function. The splitters
3836 may have introduced invalid RTL sharing, so unshare the sequence now. */
3837 unshare_all_rtl_in_chain (seq);
3839 /* Mark labels and copy flags. */
3840 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3842 if (JUMP_P (insn))
3844 if (JUMP_P (trial))
3845 CROSSING_JUMP_P (insn) = CROSSING_JUMP_P (trial);
3846 mark_jump_label (PATTERN (insn), insn, 0);
3847 njumps++;
3848 if (probability.initialized_p ()
3849 && any_condjump_p (insn)
3850 && !find_reg_note (insn, REG_BR_PROB, 0))
3852 /* We can preserve the REG_BR_PROB notes only if exactly
3853 one jump is created, otherwise the machine description
3854 is responsible for this step using the
3855 split_branch_probability variable. */
3856 gcc_assert (njumps == 1);
3857 add_reg_br_prob_note (insn, probability);
3862 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3863 in SEQ and copy any additional information across. */
3864 if (CALL_P (trial))
3866 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3867 if (CALL_P (insn))
3869 gcc_assert (call_insn == NULL_RTX);
3870 call_insn = insn;
3872 /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the
3873 target may have explicitly specified. */
3874 rtx *p = &CALL_INSN_FUNCTION_USAGE (insn);
3875 while (*p)
3876 p = &XEXP (*p, 1);
3877 *p = CALL_INSN_FUNCTION_USAGE (trial);
3879 /* If the old call was a sibling call, the new one must
3880 be too. */
3881 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
3885 /* Copy notes, particularly those related to the CFG. */
3886 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3888 switch (REG_NOTE_KIND (note))
3890 case REG_EH_REGION:
3891 copy_reg_eh_region_note_backward (note, insn_last, NULL);
3892 break;
3894 case REG_NORETURN:
3895 case REG_SETJMP:
3896 case REG_TM:
3897 case REG_CALL_NOCF_CHECK:
3898 case REG_CALL_ARG_LOCATION:
3899 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3901 if (CALL_P (insn))
3902 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3904 break;
3906 case REG_NON_LOCAL_GOTO:
3907 case REG_LABEL_TARGET:
3908 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3910 if (JUMP_P (insn))
3911 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3913 break;
3915 case REG_INC:
3916 if (!AUTO_INC_DEC)
3917 break;
3919 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3921 rtx reg = XEXP (note, 0);
3922 if (!FIND_REG_INC_NOTE (insn, reg)
3923 && find_auto_inc (PATTERN (insn), reg))
3924 add_reg_note (insn, REG_INC, reg);
3926 break;
3928 case REG_ARGS_SIZE:
3929 fixup_args_size_notes (NULL, insn_last, get_args_size (note));
3930 break;
3932 case REG_CALL_DECL:
3933 case REG_UNTYPED_CALL:
3934 gcc_assert (call_insn != NULL_RTX);
3935 add_reg_note (call_insn, REG_NOTE_KIND (note), XEXP (note, 0));
3936 break;
3938 default:
3939 break;
3943 /* If there are LABELS inside the split insns increment the
3944 usage count so we don't delete the label. */
3945 if (INSN_P (trial))
3947 insn = insn_last;
3948 while (insn != NULL_RTX)
3950 /* JUMP_P insns have already been "marked" above. */
3951 if (NONJUMP_INSN_P (insn))
3952 mark_label_nuses (PATTERN (insn));
3954 insn = PREV_INSN (insn);
3958 before = PREV_INSN (trial);
3959 after = NEXT_INSN (trial);
3961 emit_insn_after_setloc (seq, trial, INSN_LOCATION (trial));
3963 delete_insn (trial);
3965 /* Recursively call try_split for each new insn created; by the
3966 time control returns here that insn will be fully split, so
3967 set LAST and continue from the insn after the one returned.
3968 We can't use next_active_insn here since AFTER may be a note.
3969 Ignore deleted insns, which can occur if not optimizing. */
3970 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3971 if (! tem->deleted () && INSN_P (tem))
3972 tem = try_split (PATTERN (tem), tem, 1);
3974 /* Return either the first or the last insn, depending on which was
3975 requested. */
3976 return last
3977 ? (after ? PREV_INSN (after) : get_last_insn ())
3978 : NEXT_INSN (before);
3981 /* Make and return an INSN rtx, initializing all its slots.
3982 Store PATTERN in the pattern slots. */
3984 rtx_insn *
3985 make_insn_raw (rtx pattern)
3987 rtx_insn *insn;
3989 insn = as_a <rtx_insn *> (rtx_alloc (INSN));
3991 INSN_UID (insn) = cur_insn_uid++;
3992 PATTERN (insn) = pattern;
3993 INSN_CODE (insn) = -1;
3994 REG_NOTES (insn) = NULL;
3995 INSN_LOCATION (insn) = curr_insn_location ();
3996 BLOCK_FOR_INSN (insn) = NULL;
3998 #ifdef ENABLE_RTL_CHECKING
3999 if (insn
4000 && INSN_P (insn)
4001 && (returnjump_p (insn)
4002 || (GET_CODE (insn) == SET
4003 && SET_DEST (insn) == pc_rtx)))
4005 warning (0, "ICE: %<emit_insn%> used where %<emit_jump_insn%> needed:");
4006 debug_rtx (insn);
4008 #endif
4010 return insn;
4013 /* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn. */
4015 static rtx_insn *
4016 make_debug_insn_raw (rtx pattern)
4018 rtx_debug_insn *insn;
4020 insn = as_a <rtx_debug_insn *> (rtx_alloc (DEBUG_INSN));
4021 INSN_UID (insn) = cur_debug_insn_uid++;
4022 if (cur_debug_insn_uid > param_min_nondebug_insn_uid)
4023 INSN_UID (insn) = cur_insn_uid++;
4025 PATTERN (insn) = pattern;
4026 INSN_CODE (insn) = -1;
4027 REG_NOTES (insn) = NULL;
4028 INSN_LOCATION (insn) = curr_insn_location ();
4029 BLOCK_FOR_INSN (insn) = NULL;
4031 return insn;
4034 /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
4036 static rtx_insn *
4037 make_jump_insn_raw (rtx pattern)
4039 rtx_jump_insn *insn;
4041 insn = as_a <rtx_jump_insn *> (rtx_alloc (JUMP_INSN));
4042 INSN_UID (insn) = cur_insn_uid++;
4044 PATTERN (insn) = pattern;
4045 INSN_CODE (insn) = -1;
4046 REG_NOTES (insn) = NULL;
4047 JUMP_LABEL (insn) = NULL;
4048 INSN_LOCATION (insn) = curr_insn_location ();
4049 BLOCK_FOR_INSN (insn) = NULL;
4051 return insn;
4054 /* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
4056 static rtx_insn *
4057 make_call_insn_raw (rtx pattern)
4059 rtx_call_insn *insn;
4061 insn = as_a <rtx_call_insn *> (rtx_alloc (CALL_INSN));
4062 INSN_UID (insn) = cur_insn_uid++;
4064 PATTERN (insn) = pattern;
4065 INSN_CODE (insn) = -1;
4066 REG_NOTES (insn) = NULL;
4067 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
4068 INSN_LOCATION (insn) = curr_insn_location ();
4069 BLOCK_FOR_INSN (insn) = NULL;
4071 return insn;
4074 /* Like `make_insn_raw' but make a NOTE instead of an insn. */
4076 static rtx_note *
4077 make_note_raw (enum insn_note subtype)
4079 /* Some notes are never created this way at all. These notes are
4080 only created by patching out insns. */
4081 gcc_assert (subtype != NOTE_INSN_DELETED_LABEL
4082 && subtype != NOTE_INSN_DELETED_DEBUG_LABEL);
4084 rtx_note *note = as_a <rtx_note *> (rtx_alloc (NOTE));
4085 INSN_UID (note) = cur_insn_uid++;
4086 NOTE_KIND (note) = subtype;
4087 BLOCK_FOR_INSN (note) = NULL;
4088 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
4089 return note;
4092 /* Link INSN into the doubly-linked list between PREV and NEXT.
4093 INSN may be any object that can appear in the chain: INSN_P and NOTE_P objects,
4094 but also BARRIERs and JUMP_TABLE_DATAs. PREV and NEXT may be NULL. */
4096 static inline void
4097 link_insn_into_chain (rtx_insn *insn, rtx_insn *prev, rtx_insn *next)
4099 SET_PREV_INSN (insn) = prev;
4100 SET_NEXT_INSN (insn) = next;
4101 if (prev != NULL)
4103 SET_NEXT_INSN (prev) = insn;
4104 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
4106 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
4107 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = insn;
4110 if (next != NULL)
4112 SET_PREV_INSN (next) = insn;
4113 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
4115 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
4116 SET_PREV_INSN (sequence->insn (0)) = insn;
4120 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
4122 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (insn));
4123 SET_PREV_INSN (sequence->insn (0)) = prev;
4124 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
4128 /* Add INSN to the end of the doubly-linked list.
4129 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
4131 void
4132 add_insn (rtx_insn *insn)
4134 rtx_insn *prev = get_last_insn ();
4135 link_insn_into_chain (insn, prev, NULL);
4136 if (get_insns () == NULL)
4137 set_first_insn (insn);
4138 set_last_insn (insn);
4141 /* Add INSN into the doubly-linked list after insn AFTER. */
4143 static void
4144 add_insn_after_nobb (rtx_insn *insn, rtx_insn *after)
4146 rtx_insn *next = NEXT_INSN (after);
4148 gcc_assert (!optimize || !after->deleted ());
4150 link_insn_into_chain (insn, after, next);
4152 if (next == NULL)
4154 struct sequence_stack *seq;
4156 for (seq = get_current_sequence (); seq; seq = seq->next)
4157 if (after == seq->last)
4159 seq->last = insn;
4160 break;
4165 /* Add INSN into the doubly-linked list before insn BEFORE. */
4167 static void
4168 add_insn_before_nobb (rtx_insn *insn, rtx_insn *before)
4170 rtx_insn *prev = PREV_INSN (before);
4172 gcc_assert (!optimize || !before->deleted ());
4174 link_insn_into_chain (insn, prev, before);
4176 if (prev == NULL)
4178 struct sequence_stack *seq;
4180 for (seq = get_current_sequence (); seq; seq = seq->next)
4181 if (before == seq->first)
4183 seq->first = insn;
4184 break;
4187 gcc_assert (seq);
4191 /* Like add_insn_after_nobb, but try to set BLOCK_FOR_INSN.
4192 If BB is NULL, an attempt is made to infer the bb from AFTER.
4194 This and the next function should be the only functions called
4195 to insert an insn once delay slots have been filled since only
4196 they know how to update a SEQUENCE. */
4198 void
4199 add_insn_after (rtx_insn *insn, rtx_insn *after, basic_block bb)
4201 add_insn_after_nobb (insn, after);
4202 if (!BARRIER_P (after)
4203 && !BARRIER_P (insn)
4204 && (bb = BLOCK_FOR_INSN (after)))
4206 set_block_for_insn (insn, bb);
4207 if (INSN_P (insn))
4208 df_insn_rescan (insn);
4209 /* Should not happen as first in the BB is always
4210 either NOTE or LABEL. */
4211 if (BB_END (bb) == after
4212 /* Avoid clobbering of structure when creating new BB. */
4213 && !BARRIER_P (insn)
4214 && !NOTE_INSN_BASIC_BLOCK_P (insn))
4215 BB_END (bb) = insn;
4219 /* Like add_insn_before_nobb, but try to set BLOCK_FOR_INSN.
4220 If BB is NULL, an attempt is made to infer the bb from before.
4222 This and the previous function should be the only functions called
4223 to insert an insn once delay slots have been filled since only
4224 they know how to update a SEQUENCE. */
4226 void
4227 add_insn_before (rtx_insn *insn, rtx_insn *before, basic_block bb)
4229 add_insn_before_nobb (insn, before);
4231 if (!bb
4232 && !BARRIER_P (before)
4233 && !BARRIER_P (insn))
4234 bb = BLOCK_FOR_INSN (before);
4236 if (bb)
4238 set_block_for_insn (insn, bb);
4239 if (INSN_P (insn))
4240 df_insn_rescan (insn);
4241 /* Should not happen as first in the BB is always either NOTE or
4242 LABEL. */
4243 gcc_assert (BB_HEAD (bb) != insn
4244 /* Avoid clobbering of structure when creating new BB. */
4245 || BARRIER_P (insn)
4246 || NOTE_INSN_BASIC_BLOCK_P (insn));
4250 /* Replace INSN with a deleted instruction note. */
4252 void
4253 set_insn_deleted (rtx_insn *insn)
4255 if (INSN_P (insn))
4256 df_insn_delete (insn);
4257 PUT_CODE (insn, NOTE);
4258 NOTE_KIND (insn) = NOTE_INSN_DELETED;
4262 /* Unlink INSN from the insn chain.
4264 This function knows how to handle sequences.
4266 This function does not invalidate data flow information associated with
4267 INSN (i.e. does not call df_insn_delete). That makes this function
4268 usable for merely disconnecting an insn from the chain and re-emitting
4269 it elsewhere later.
4271 To later insert INSN elsewhere in the insn chain via add_insn and
4272 similar functions, PREV_INSN and NEXT_INSN must be nullified by
4273 the caller. Nullifying them here breaks many insn chain walks.
4275 To really delete an insn and related DF information, use delete_insn. */
4277 void
4278 remove_insn (rtx_insn *insn)
4280 rtx_insn *next = NEXT_INSN (insn);
4281 rtx_insn *prev = PREV_INSN (insn);
4282 basic_block bb;
4284 if (prev)
4286 SET_NEXT_INSN (prev) = next;
4287 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
4289 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
4290 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
4293 else
4295 struct sequence_stack *seq;
4297 for (seq = get_current_sequence (); seq; seq = seq->next)
4298 if (insn == seq->first)
4300 seq->first = next;
4301 break;
4304 gcc_assert (seq);
4307 if (next)
4309 SET_PREV_INSN (next) = prev;
4310 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
4312 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
4313 SET_PREV_INSN (sequence->insn (0)) = prev;
4316 else
4318 struct sequence_stack *seq;
4320 for (seq = get_current_sequence (); seq; seq = seq->next)
4321 if (insn == seq->last)
4323 seq->last = prev;
4324 break;
4327 gcc_assert (seq);
4330 /* Fix up basic block boundaries, if necessary. */
4331 if (!BARRIER_P (insn)
4332 && (bb = BLOCK_FOR_INSN (insn)))
4334 if (BB_HEAD (bb) == insn)
4336 /* Never ever delete the basic block note without deleting whole
4337 basic block. */
4338 gcc_assert (!NOTE_P (insn));
4339 BB_HEAD (bb) = next;
4341 if (BB_END (bb) == insn)
4342 BB_END (bb) = prev;
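/* A minimal illustrative sketch (not part of GCC): one way a caller might
   use remove_insn to disconnect an insn and re-emit it elsewhere, as the
   comment above describes.  Note the required nullification of the chain
   pointers before re-linking; the helper name is hypothetical.  */

static void
move_insn_after_sketch (rtx_insn *insn, rtx_insn *after)
{
  remove_insn (insn);			/* Unlink; DF info is kept.  */
  SET_PREV_INSN (insn) = NULL;		/* Must be nullified by the caller.  */
  SET_NEXT_INSN (insn) = NULL;
  add_insn_after (insn, after, NULL);	/* Re-link after AFTER.  */
}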
4346 /* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
4348 void
4349 add_function_usage_to (rtx call_insn, rtx call_fusage)
4351 gcc_assert (call_insn && CALL_P (call_insn));
4353 /* Put the register usage information on the CALL. If there is already
4354 some usage information, put ours at the end. */
4355 if (CALL_INSN_FUNCTION_USAGE (call_insn))
4357 rtx link;
4359 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
4360 link = XEXP (link, 1))
4363 XEXP (link, 1) = call_fusage;
4365 else
4366 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
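/* Illustrative sketch only (not part of GCC): a typical way callers build
   CALL_FUSAGE before handing it to add_function_usage_to -- collect a
   register the call implicitly uses with use_reg (assumed here from
   expr.h) and append the resulting list.  The helper name is made up.  */

static void
note_call_uses_reg_sketch (rtx_insn *call_insn, rtx reg)
{
  rtx call_fusage = NULL_RTX;
  use_reg (&call_fusage, reg);		/* Builds an expr_list of (use reg).  */
  add_function_usage_to (call_insn, call_fusage);
}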
4369 /* Delete all insns made since FROM.
4370 FROM becomes the new last instruction. */
4372 void
4373 delete_insns_since (rtx_insn *from)
4375 if (from == 0)
4376 set_first_insn (0);
4377 else
4378 SET_NEXT_INSN (from) = 0;
4379 set_last_insn (from);
4382 /* This function is deprecated, please use sequences instead.
4384 Move a consecutive bunch of insns to a different place in the chain.
4385 The insns to be moved are those between FROM and TO.
4386 They are moved to a new position after the insn AFTER.
4387 AFTER must not be FROM or TO or any insn in between.
4389 This function does not know about SEQUENCEs and hence should not be
4390 called after delay-slot filling has been done. */
4392 void
4393 reorder_insns_nobb (rtx_insn *from, rtx_insn *to, rtx_insn *after)
4395 if (flag_checking)
4397 for (rtx_insn *x = from; x != to; x = NEXT_INSN (x))
4398 gcc_assert (after != x);
4399 gcc_assert (after != to);
4402 /* Splice this bunch out of where it is now. */
4403 if (PREV_INSN (from))
4404 SET_NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
4405 if (NEXT_INSN (to))
4406 SET_PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
4407 if (get_last_insn () == to)
4408 set_last_insn (PREV_INSN (from));
4409 if (get_insns () == from)
4410 set_first_insn (NEXT_INSN (to));
4412 /* Make the new neighbors point to it and it to them. */
4413 if (NEXT_INSN (after))
4414 SET_PREV_INSN (NEXT_INSN (after)) = to;
4416 SET_NEXT_INSN (to) = NEXT_INSN (after);
4417 SET_PREV_INSN (from) = after;
4418 SET_NEXT_INSN (after) = from;
4419 if (after == get_last_insn ())
4420 set_last_insn (to);
4423 /* Same as function above, but take care to update BB boundaries. */
4424 void
4425 reorder_insns (rtx_insn *from, rtx_insn *to, rtx_insn *after)
4427 rtx_insn *prev = PREV_INSN (from);
4428 basic_block bb, bb2;
4430 reorder_insns_nobb (from, to, after);
4432 if (!BARRIER_P (after)
4433 && (bb = BLOCK_FOR_INSN (after)))
4435 rtx_insn *x;
4436 df_set_bb_dirty (bb);
4438 if (!BARRIER_P (from)
4439 && (bb2 = BLOCK_FOR_INSN (from)))
4441 if (BB_END (bb2) == to)
4442 BB_END (bb2) = prev;
4443 df_set_bb_dirty (bb2);
4446 if (BB_END (bb) == after)
4447 BB_END (bb) = to;
4449 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
4450 if (!BARRIER_P (x))
4451 df_insn_change_bb (x, bb);
4456 /* Emit insn(s) of given code and pattern
4457 at a specified place within the doubly-linked list.
4459 All of the emit_foo global entry points accept an object
4460 X which is either an insn list or a PATTERN of a single
4461 instruction.
4463 There are thus a few canonical ways to generate code and
4464 emit it at a specific place in the instruction stream. For
4465 example, consider the instruction named SPOT and the fact that
4466 we would like to emit some instructions before SPOT. We might
4467 do it like this:
4469 start_sequence ();
4470 ... emit the new instructions ...
4471 insns_head = get_insns ();
4472 end_sequence ();
4474 emit_insn_before (insns_head, SPOT);
4476 It used to be common to generate SEQUENCE rtl instead, but that
4477 is a relic of the past which no longer occurs. The reason is that
4478 SEQUENCE rtl results in much fragmented RTL memory since the SEQUENCE
4479 generated would almost certainly die right after it was created. */
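/* A minimal compilable sketch of the pattern just described (illustrative
   only, not part of GCC).  The operands and the hypothetical helper name
   are assumptions; the point is the start_sequence / get_insns /
   end_sequence / emit_insn_before dance.  */

static void
emit_before_spot_sketch (rtx reg1, rtx reg2, rtx_insn *spot)
{
  start_sequence ();
  emit_insn (gen_rtx_SET (reg1, reg2));	/* ... emit the new instructions ...  */
  emit_insn (gen_rtx_SET (reg2, reg1));
  rtx_insn *insns_head = get_insns ();
  end_sequence ();
  emit_insn_before (insns_head, spot);
}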
4481 static rtx_insn *
4482 emit_pattern_before_noloc (rtx x, rtx_insn *before, rtx_insn *last,
4483 basic_block bb,
4484 rtx_insn *(*make_raw) (rtx))
4486 rtx_insn *insn;
4488 gcc_assert (before);
4490 if (x == NULL_RTX)
4491 return last;
4493 switch (GET_CODE (x))
4495 case DEBUG_INSN:
4496 case INSN:
4497 case JUMP_INSN:
4498 case CALL_INSN:
4499 case CODE_LABEL:
4500 case BARRIER:
4501 case NOTE:
4502 insn = as_a <rtx_insn *> (x);
4503 while (insn)
4505 rtx_insn *next = NEXT_INSN (insn);
4506 add_insn_before (insn, before, bb);
4507 last = insn;
4508 insn = next;
4510 break;
4512 #ifdef ENABLE_RTL_CHECKING
4513 case SEQUENCE:
4514 gcc_unreachable ();
4515 break;
4516 #endif
4518 default:
4519 last = (*make_raw) (x);
4520 add_insn_before (last, before, bb);
4521 break;
4524 return last;
4527 /* Make X be output before the instruction BEFORE. */
4529 rtx_insn *
4530 emit_insn_before_noloc (rtx x, rtx_insn *before, basic_block bb)
4532 return emit_pattern_before_noloc (x, before, before, bb, make_insn_raw);
4535 /* Make an instruction with body X and code JUMP_INSN
4536 and output it before the instruction BEFORE. */
4538 rtx_jump_insn *
4539 emit_jump_insn_before_noloc (rtx x, rtx_insn *before)
4541 return as_a <rtx_jump_insn *> (
4542 emit_pattern_before_noloc (x, before, NULL, NULL,
4543 make_jump_insn_raw));
4546 /* Make an instruction with body X and code CALL_INSN
4547 and output it before the instruction BEFORE. */
4549 rtx_insn *
4550 emit_call_insn_before_noloc (rtx x, rtx_insn *before)
4552 return emit_pattern_before_noloc (x, before, NULL, NULL,
4553 make_call_insn_raw);
4556 /* Make an instruction with body X and code DEBUG_INSN
4557 and output it before the instruction BEFORE. */
4559 rtx_insn *
4560 emit_debug_insn_before_noloc (rtx x, rtx_insn *before)
4562 return emit_pattern_before_noloc (x, before, NULL, NULL,
4563 make_debug_insn_raw);
4566 /* Make an insn of code BARRIER
4567 and output it before the insn BEFORE. */
4569 rtx_barrier *
4570 emit_barrier_before (rtx_insn *before)
4572 rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
4574 INSN_UID (insn) = cur_insn_uid++;
4576 add_insn_before (insn, before, NULL);
4577 return insn;
4580 /* Emit the label LABEL before the insn BEFORE. */
4582 rtx_code_label *
4583 emit_label_before (rtx_code_label *label, rtx_insn *before)
4585 gcc_checking_assert (INSN_UID (label) == 0);
4586 INSN_UID (label) = cur_insn_uid++;
4587 add_insn_before (label, before, NULL);
4588 return label;
4591 /* Helper for emit_insn_after, handles lists of instructions
4592 efficiently. */
4594 static rtx_insn *
4595 emit_insn_after_1 (rtx_insn *first, rtx_insn *after, basic_block bb)
4597 rtx_insn *last;
4598 rtx_insn *after_after;
4599 if (!bb && !BARRIER_P (after))
4600 bb = BLOCK_FOR_INSN (after);
4602 if (bb)
4604 df_set_bb_dirty (bb);
4605 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4606 if (!BARRIER_P (last))
4608 set_block_for_insn (last, bb);
4609 df_insn_rescan (last);
4611 if (!BARRIER_P (last))
4613 set_block_for_insn (last, bb);
4614 df_insn_rescan (last);
4616 if (BB_END (bb) == after)
4617 BB_END (bb) = last;
4619 else
4620 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4621 continue;
4623 after_after = NEXT_INSN (after);
4625 SET_NEXT_INSN (after) = first;
4626 SET_PREV_INSN (first) = after;
4627 SET_NEXT_INSN (last) = after_after;
4628 if (after_after)
4629 SET_PREV_INSN (after_after) = last;
4631 if (after == get_last_insn ())
4632 set_last_insn (last);
4634 return last;
4637 static rtx_insn *
4638 emit_pattern_after_noloc (rtx x, rtx_insn *after, basic_block bb,
4639 rtx_insn *(*make_raw)(rtx))
4641 rtx_insn *last = after;
4643 gcc_assert (after);
4645 if (x == NULL_RTX)
4646 return last;
4648 switch (GET_CODE (x))
4650 case DEBUG_INSN:
4651 case INSN:
4652 case JUMP_INSN:
4653 case CALL_INSN:
4654 case CODE_LABEL:
4655 case BARRIER:
4656 case NOTE:
4657 last = emit_insn_after_1 (as_a <rtx_insn *> (x), after, bb);
4658 break;
4660 #ifdef ENABLE_RTL_CHECKING
4661 case SEQUENCE:
4662 gcc_unreachable ();
4663 break;
4664 #endif
4666 default:
4667 last = (*make_raw) (x);
4668 add_insn_after (last, after, bb);
4669 break;
4672 return last;
4675 /* Make X be output after the insn AFTER and set the BB of insn. If
4676 BB is NULL, an attempt is made to infer the BB from AFTER. */
4678 rtx_insn *
4679 emit_insn_after_noloc (rtx x, rtx_insn *after, basic_block bb)
4681 return emit_pattern_after_noloc (x, after, bb, make_insn_raw);
4685 /* Make an insn of code JUMP_INSN with body X
4686 and output it after the insn AFTER. */
4688 rtx_jump_insn *
4689 emit_jump_insn_after_noloc (rtx x, rtx_insn *after)
4691 return as_a <rtx_jump_insn *> (
4692 emit_pattern_after_noloc (x, after, NULL, make_jump_insn_raw));
4695 /* Make an instruction with body X and code CALL_INSN
4696 and output it after the instruction AFTER. */
4698 rtx_insn *
4699 emit_call_insn_after_noloc (rtx x, rtx_insn *after)
4701 return emit_pattern_after_noloc (x, after, NULL, make_call_insn_raw);
4704 /* Make an instruction with body X and code DEBUG_INSN
4705 and output it after the instruction AFTER. */
4707 rtx_insn *
4708 emit_debug_insn_after_noloc (rtx x, rtx_insn *after)
4710 return emit_pattern_after_noloc (x, after, NULL, make_debug_insn_raw);
4713 /* Make an insn of code BARRIER
4714 and output it after the insn AFTER. */
4716 rtx_barrier *
4717 emit_barrier_after (rtx_insn *after)
4719 rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
4721 INSN_UID (insn) = cur_insn_uid++;
4723 add_insn_after (insn, after, NULL);
4724 return insn;
4727 /* Emit the label LABEL after the insn AFTER. */
4729 rtx_insn *
4730 emit_label_after (rtx_insn *label, rtx_insn *after)
4732 gcc_checking_assert (INSN_UID (label) == 0);
4733 INSN_UID (label) = cur_insn_uid++;
4734 add_insn_after (label, after, NULL);
4735 return label;
4738 /* Notes require a bit of special handling: Some notes need to have their
4739 BLOCK_FOR_INSN set, others should never have it set, and some should
4740 have it set or clear depending on the context. */
4742 /* Return true iff a note of kind SUBTYPE should be emitted with routines
4743 that never set BLOCK_FOR_INSN on NOTE. ON_BB_BOUNDARY_P is true if the
4744 caller is asked to emit a note before BB_HEAD, or after BB_END. */
4746 static bool
4747 note_outside_basic_block_p (enum insn_note subtype, bool on_bb_boundary_p)
4749 switch (subtype)
4751 /* NOTE_INSN_SWITCH_TEXT_SECTIONS only appears between basic blocks. */
4752 case NOTE_INSN_SWITCH_TEXT_SECTIONS:
4753 return true;
4755 /* Notes for var tracking and EH region markers can appear between or
4756 inside basic blocks. If the caller is emitting on the basic block
4757 boundary, do not set BLOCK_FOR_INSN on the new note. */
4758 case NOTE_INSN_VAR_LOCATION:
4759 case NOTE_INSN_EH_REGION_BEG:
4760 case NOTE_INSN_EH_REGION_END:
4761 return on_bb_boundary_p;
4763 /* Otherwise, BLOCK_FOR_INSN must be set. */
4764 default:
4765 return false;
4769 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
4771 rtx_note *
4772 emit_note_after (enum insn_note subtype, rtx_insn *after)
4774 rtx_note *note = make_note_raw (subtype);
4775 basic_block bb = BARRIER_P (after) ? NULL : BLOCK_FOR_INSN (after);
4776 bool on_bb_boundary_p = (bb != NULL && BB_END (bb) == after);
4778 if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4779 add_insn_after_nobb (note, after);
4780 else
4781 add_insn_after (note, after, bb);
4782 return note;
4785 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4787 rtx_note *
4788 emit_note_before (enum insn_note subtype, rtx_insn *before)
4790 rtx_note *note = make_note_raw (subtype);
4791 basic_block bb = BARRIER_P (before) ? NULL : BLOCK_FOR_INSN (before);
4792 bool on_bb_boundary_p = (bb != NULL && BB_HEAD (bb) == before);
4794 if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4795 add_insn_before_nobb (note, before);
4796 else
4797 add_insn_before (note, before, bb);
4798 return note;
4801 /* Insert PATTERN after AFTER, setting its INSN_LOCATION to LOC.
4802 MAKE_RAW indicates how to turn PATTERN into a real insn. */
4804 static rtx_insn *
4805 emit_pattern_after_setloc (rtx pattern, rtx_insn *after, location_t loc,
4806 rtx_insn *(*make_raw) (rtx))
4808 rtx_insn *last = emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4810 if (pattern == NULL_RTX || !loc)
4811 return last;
4813 after = NEXT_INSN (after);
4814 while (1)
4816 if (active_insn_p (after)
4817 && !JUMP_TABLE_DATA_P (after) /* FIXME */
4818 && !INSN_LOCATION (after))
4819 INSN_LOCATION (after) = loc;
4820 if (after == last)
4821 break;
4822 after = NEXT_INSN (after);
4824 return last;
4827 /* Insert PATTERN after AFTER. MAKE_RAW indicates how to turn PATTERN
4828 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert after
4829 any DEBUG_INSNs. */
4831 static rtx_insn *
4832 emit_pattern_after (rtx pattern, rtx_insn *after, bool skip_debug_insns,
4833 rtx_insn *(*make_raw) (rtx))
4835 rtx_insn *prev = after;
4837 if (skip_debug_insns)
4838 while (DEBUG_INSN_P (prev))
4839 prev = PREV_INSN (prev);
4841 if (INSN_P (prev))
4842 return emit_pattern_after_setloc (pattern, after, INSN_LOCATION (prev),
4843 make_raw);
4844 else
4845 return emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4848 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4849 rtx_insn *
4850 emit_insn_after_setloc (rtx pattern, rtx_insn *after, location_t loc)
4852 return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw);
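/* Illustrative sketch (not part of GCC): the same replace-and-relocate
   idiom try_split uses above -- emit a replacement pattern after OLD_INSN
   with OLD_INSN's location, then delete the original (delete_insn comes
   from cfgrtl).  The helper name is hypothetical.  */

static rtx_insn *
replace_keeping_location_sketch (rtx pattern, rtx_insn *old_insn)
{
  rtx_insn *insn = emit_insn_after_setloc (pattern, old_insn,
					   INSN_LOCATION (old_insn));
  delete_insn (old_insn);
  return insn;
}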
4855 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4856 rtx_insn *
4857 emit_insn_after (rtx pattern, rtx_insn *after)
4859 return emit_pattern_after (pattern, after, true, make_insn_raw);
4862 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4863 rtx_jump_insn *
4864 emit_jump_insn_after_setloc (rtx pattern, rtx_insn *after, location_t loc)
4866 return as_a <rtx_jump_insn *> (
4867 emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw));
4870 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4871 rtx_jump_insn *
4872 emit_jump_insn_after (rtx pattern, rtx_insn *after)
4874 return as_a <rtx_jump_insn *> (
4875 emit_pattern_after (pattern, after, true, make_jump_insn_raw));
4878 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4879 rtx_insn *
4880 emit_call_insn_after_setloc (rtx pattern, rtx_insn *after, location_t loc)
4882 return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw);
4885 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4886 rtx_insn *
4887 emit_call_insn_after (rtx pattern, rtx_insn *after)
4889 return emit_pattern_after (pattern, after, true, make_call_insn_raw);
4892 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4893 rtx_insn *
4894 emit_debug_insn_after_setloc (rtx pattern, rtx_insn *after, location_t loc)
4896 return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw);
4899 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4900 rtx_insn *
4901 emit_debug_insn_after (rtx pattern, rtx_insn *after)
4903 return emit_pattern_after (pattern, after, false, make_debug_insn_raw);
4906 /* Insert PATTERN before BEFORE, setting its INSN_LOCATION to LOC.
4907 MAKE_RAW indicates how to turn PATTERN into a real insn. INSNP
4908 indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN,
4909 CALL_INSN, etc. */
4911 static rtx_insn *
4912 emit_pattern_before_setloc (rtx pattern, rtx_insn *before, location_t loc,
4913 bool insnp, rtx_insn *(*make_raw) (rtx))
4915 rtx_insn *first = PREV_INSN (before);
4916 rtx_insn *last = emit_pattern_before_noloc (pattern, before,
4917 insnp ? before : NULL,
4918 NULL, make_raw);
4920 if (pattern == NULL_RTX || !loc)
4921 return last;
4923 if (!first)
4924 first = get_insns ();
4925 else
4926 first = NEXT_INSN (first);
4927 while (1)
4929 if (active_insn_p (first)
4930 && !JUMP_TABLE_DATA_P (first) /* FIXME */
4931 && !INSN_LOCATION (first))
4932 INSN_LOCATION (first) = loc;
4933 if (first == last)
4934 break;
4935 first = NEXT_INSN (first);
4937 return last;
4940 /* Insert PATTERN before BEFORE. MAKE_RAW indicates how to turn PATTERN
4941 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert
4942 before any DEBUG_INSNs. INSNP indicates if PATTERN is meant for an
4943 INSN as opposed to a JUMP_INSN, CALL_INSN, etc. */
4945 static rtx_insn *
4946 emit_pattern_before (rtx pattern, rtx_insn *before, bool skip_debug_insns,
4947 bool insnp, rtx_insn *(*make_raw) (rtx))
4949 rtx_insn *next = before;
4951 if (skip_debug_insns)
4952 while (DEBUG_INSN_P (next))
4953 next = PREV_INSN (next);
4955 if (INSN_P (next))
4956 return emit_pattern_before_setloc (pattern, before, INSN_LOCATION (next),
4957 insnp, make_raw);
4958 else
4959 return emit_pattern_before_noloc (pattern, before,
4960 insnp ? before : NULL,
4961 NULL, make_raw);
4964 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4965 rtx_insn *
4966 emit_insn_before_setloc (rtx pattern, rtx_insn *before, location_t loc)
4968 return emit_pattern_before_setloc (pattern, before, loc, true,
4969 make_insn_raw);
4972 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
4973 rtx_insn *
4974 emit_insn_before (rtx pattern, rtx_insn *before)
4976 return emit_pattern_before (pattern, before, true, true, make_insn_raw);
4979 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4980 rtx_jump_insn *
4981 emit_jump_insn_before_setloc (rtx pattern, rtx_insn *before, location_t loc)
4983 return as_a <rtx_jump_insn *> (
4984 emit_pattern_before_setloc (pattern, before, loc, false,
4985 make_jump_insn_raw));
4988 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
4989 rtx_jump_insn *
4990 emit_jump_insn_before (rtx pattern, rtx_insn *before)
4992 return as_a <rtx_jump_insn *> (
4993 emit_pattern_before (pattern, before, true, false,
4994 make_jump_insn_raw));
4997 /* Like emit_call_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4998 rtx_insn *
4999 emit_call_insn_before_setloc (rtx pattern, rtx_insn *before, location_t loc)
5001 return emit_pattern_before_setloc (pattern, before, loc, false,
5002 make_call_insn_raw);
5005 /* Like emit_call_insn_before_noloc,
5006 but set INSN_LOCATION according to BEFORE. */
5007 rtx_insn *
5008 emit_call_insn_before (rtx pattern, rtx_insn *before)
5010 return emit_pattern_before (pattern, before, true, false,
5011 make_call_insn_raw);
5014 /* Like emit_debug_insn_before_noloc, but set INSN_LOCATION according to LOC. */
5015 rtx_insn *
5016 emit_debug_insn_before_setloc (rtx pattern, rtx_insn *before, location_t loc)
5018 return emit_pattern_before_setloc (pattern, before, loc, false,
5019 make_debug_insn_raw);
5022 /* Like emit_debug_insn_before_noloc,
5023 but set INSN_LOCATION according to BEFORE. */
5024 rtx_insn *
5025 emit_debug_insn_before (rtx pattern, rtx_insn *before)
5027 return emit_pattern_before (pattern, before, false, false,
5028 make_debug_insn_raw);
5031 /* Take X and emit it at the end of the doubly-linked
5032 INSN list.
5034 Returns the last insn emitted. */
5036 rtx_insn *
5037 emit_insn (rtx x)
5039 rtx_insn *last = get_last_insn ();
5040 rtx_insn *insn;
5042 if (x == NULL_RTX)
5043 return last;
5045 switch (GET_CODE (x))
5047 case DEBUG_INSN:
5048 case INSN:
5049 case JUMP_INSN:
5050 case CALL_INSN:
5051 case CODE_LABEL:
5052 case BARRIER:
5053 case NOTE:
5054 insn = as_a <rtx_insn *> (x);
5055 while (insn)
5057 rtx_insn *next = NEXT_INSN (insn);
5058 add_insn (insn);
5059 last = insn;
5060 insn = next;
5062 break;
5064 #ifdef ENABLE_RTL_CHECKING
5065 case JUMP_TABLE_DATA:
5066 case SEQUENCE:
5067 gcc_unreachable ();
5068 break;
5069 #endif
5071 default:
5072 last = make_insn_raw (x);
5073 add_insn (last);
5074 break;
5077 return last;
5080 /* Make an insn of code DEBUG_INSN with pattern X
5081 and add it to the end of the doubly-linked list. */
5083 rtx_insn *
5084 emit_debug_insn (rtx x)
5086 rtx_insn *last = get_last_insn ();
5087 rtx_insn *insn;
5089 if (x == NULL_RTX)
5090 return last;
5092 switch (GET_CODE (x))
5094 case DEBUG_INSN:
5095 case INSN:
5096 case JUMP_INSN:
5097 case CALL_INSN:
5098 case CODE_LABEL:
5099 case BARRIER:
5100 case NOTE:
5101 insn = as_a <rtx_insn *> (x);
5102 while (insn)
5104 rtx_insn *next = NEXT_INSN (insn);
5105 add_insn (insn);
5106 last = insn;
5107 insn = next;
5109 break;
5111 #ifdef ENABLE_RTL_CHECKING
5112 case JUMP_TABLE_DATA:
5113 case SEQUENCE:
5114 gcc_unreachable ();
5115 break;
5116 #endif
5118 default:
5119 last = make_debug_insn_raw (x);
5120 add_insn (last);
5121 break;
5124 return last;
5127 /* Make an insn of code JUMP_INSN with pattern X
5128 and add it to the end of the doubly-linked list. */
5130 rtx_insn *
5131 emit_jump_insn (rtx x)
5133 rtx_insn *last = NULL;
5134 rtx_insn *insn;
5136 switch (GET_CODE (x))
5138 case DEBUG_INSN:
5139 case INSN:
5140 case JUMP_INSN:
5141 case CALL_INSN:
5142 case CODE_LABEL:
5143 case BARRIER:
5144 case NOTE:
5145 insn = as_a <rtx_insn *> (x);
5146 while (insn)
5148 rtx_insn *next = NEXT_INSN (insn);
5149 add_insn (insn);
5150 last = insn;
5151 insn = next;
5153 break;
5155 #ifdef ENABLE_RTL_CHECKING
5156 case JUMP_TABLE_DATA:
5157 case SEQUENCE:
5158 gcc_unreachable ();
5159 break;
5160 #endif
5162 default:
5163 last = make_jump_insn_raw (x);
5164 add_insn (last);
5165 break;
5168 return last;
5171 /* Make an insn of code CALL_INSN with pattern X
5172 and add it to the end of the doubly-linked list. */
5174 rtx_insn *
5175 emit_call_insn (rtx x)
5177 rtx_insn *insn;
5179 switch (GET_CODE (x))
5181 case DEBUG_INSN:
5182 case INSN:
5183 case JUMP_INSN:
5184 case CALL_INSN:
5185 case CODE_LABEL:
5186 case BARRIER:
5187 case NOTE:
5188 insn = emit_insn (x);
5189 break;
5191 #ifdef ENABLE_RTL_CHECKING
5192 case SEQUENCE:
5193 case JUMP_TABLE_DATA:
5194 gcc_unreachable ();
5195 break;
5196 #endif
5198 default:
5199 insn = make_call_insn_raw (x);
5200 add_insn (insn);
5201 break;
5204 return insn;
5207 /* Add the label LABEL to the end of the doubly-linked list. */
5209 rtx_code_label *
5210 emit_label (rtx uncast_label)
5212 rtx_code_label *label = as_a <rtx_code_label *> (uncast_label);
5214 gcc_checking_assert (INSN_UID (label) == 0);
5215 INSN_UID (label) = cur_insn_uid++;
5216 add_insn (label);
5217 return label;
5220 /* Make an insn of code JUMP_TABLE_DATA
5221 and add it to the end of the doubly-linked list. */
5223 rtx_jump_table_data *
5224 emit_jump_table_data (rtx table)
5226 rtx_jump_table_data *jump_table_data =
5227 as_a <rtx_jump_table_data *> (rtx_alloc (JUMP_TABLE_DATA));
5228 INSN_UID (jump_table_data) = cur_insn_uid++;
5229 PATTERN (jump_table_data) = table;
5230 BLOCK_FOR_INSN (jump_table_data) = NULL;
5231 add_insn (jump_table_data);
5232 return jump_table_data;
5235 /* Make an insn of code BARRIER
5236 and add it to the end of the doubly-linked list. */
5238 rtx_barrier *
5239 emit_barrier (void)
5241 rtx_barrier *barrier = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
5242 INSN_UID (barrier) = cur_insn_uid++;
5243 add_insn (barrier);
5244 return barrier;
5247 /* Emit a copy of note ORIG. */
5249 rtx_note *
5250 emit_note_copy (rtx_note *orig)
5252 enum insn_note kind = (enum insn_note) NOTE_KIND (orig);
5253 rtx_note *note = make_note_raw (kind);
5254 NOTE_DATA (note) = NOTE_DATA (orig);
5255 add_insn (note);
5256 return note;
5259 /* Make an insn of code NOTE with kind KIND
5260 and add it to the end of the doubly-linked list. */
5262 rtx_note *
5263 emit_note (enum insn_note kind)
5265 rtx_note *note = make_note_raw (kind);
5266 add_insn (note);
5267 return note;
5270 /* Emit a clobber of lvalue X. */
5272 rtx_insn *
5273 emit_clobber (rtx x)
5275 /* CONCATs should not appear in the insn stream. */
5276 if (GET_CODE (x) == CONCAT)
5278 emit_clobber (XEXP (x, 0));
5279 return emit_clobber (XEXP (x, 1));
5281 return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
5284 /* Return a sequence of insns to clobber lvalue X. */
5286 rtx_insn *
5287 gen_clobber (rtx x)
5289 rtx_insn *seq;
5291 start_sequence ();
5292 emit_clobber (x);
5293 seq = get_insns ();
5294 end_sequence ();
5295 return seq;
5298 /* Emit a use of rvalue X. */
5300 rtx_insn *
5301 emit_use (rtx x)
5303 /* CONCATs should not appear in the insn stream. */
5304 if (GET_CODE (x) == CONCAT)
5306 emit_use (XEXP (x, 0));
5307 return emit_use (XEXP (x, 1));
5309 return emit_insn (gen_rtx_USE (VOIDmode, x));
5312 /* Return a sequence of insns to use rvalue X. */
5314 rtx_insn *
5315 gen_use (rtx x)
5317 rtx_insn *seq;
5319 start_sequence ();
5320 emit_use (x);
5321 seq = get_insns ();
5322 end_sequence ();
5323 return seq;
5326 /* Notes like REG_EQUAL and REG_EQUIV refer to a set in an instruction.
5327 Return the set in INSN that such notes describe, or NULL if the notes
5328 have no meaning for INSN. */
5331 set_for_reg_notes (rtx insn)
5333 rtx pat, reg;
5335 if (!INSN_P (insn))
5336 return NULL_RTX;
5338 pat = PATTERN (insn);
5339 if (GET_CODE (pat) == PARALLEL)
5341 /* We do not use single_set because that ignores SETs of unused
5342 registers. REG_EQUAL and REG_EQUIV notes really do require the
5343 PARALLEL to have a single SET. */
5344 if (multiple_sets (insn))
5345 return NULL_RTX;
5346 pat = XVECEXP (pat, 0, 0);
5349 if (GET_CODE (pat) != SET)
5350 return NULL_RTX;
5352 reg = SET_DEST (pat);
5354 /* Notes apply to the contents of a STRICT_LOW_PART. */
5355 if (GET_CODE (reg) == STRICT_LOW_PART
5356 || GET_CODE (reg) == ZERO_EXTRACT)
5357 reg = XEXP (reg, 0);
5359 /* Check that we have a register. */
5360 if (!(REG_P (reg) || GET_CODE (reg) == SUBREG))
5361 return NULL_RTX;
5363 return pat;
5366 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
5367 note of this type already exists, remove it first. */
5370 set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
5372 rtx note = find_reg_note (insn, kind, NULL_RTX);
5374 switch (kind)
5376 case REG_EQUAL:
5377 case REG_EQUIV:
5378 /* We need to support the REG_EQUAL on USE trick of find_reloads. */
5379 if (!set_for_reg_notes (insn) && GET_CODE (PATTERN (insn)) != USE)
5380 return NULL_RTX;
5382 /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
5383 It serves no useful purpose and breaks eliminate_regs. */
5384 if (GET_CODE (datum) == ASM_OPERANDS)
5385 return NULL_RTX;
5387 /* Notes with side effects are dangerous. Even if the side-effect
5388 initially mirrors one in PATTERN (INSN), later optimizations
5389 might alter the way that the final register value is calculated
5390 and so move or alter the side-effect in some way. The note would
5391 then no longer be a valid substitution for SET_SRC. */
5392 if (side_effects_p (datum))
5393 return NULL_RTX;
5394 break;
5396 default:
5397 break;
5400 if (note)
5401 XEXP (note, 0) = datum;
5402 else
5404 add_reg_note (insn, kind, datum);
5405 note = REG_NOTES (insn);
5408 switch (kind)
5410 case REG_EQUAL:
5411 case REG_EQUIV:
5412 df_notes_rescan (as_a <rtx_insn *> (insn));
5413 break;
5414 default:
5415 break;
5418 return note;
5421 /* Like set_unique_reg_note, but don't do anything unless INSN sets DST. */
5423 set_dst_reg_note (rtx insn, enum reg_note kind, rtx datum, rtx dst)
5425 rtx set = set_for_reg_notes (insn);
5427 if (set && SET_DEST (set) == dst)
5428 return set_unique_reg_note (insn, kind, datum);
5429 return NULL_RTX;
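/* Illustrative sketch (not part of GCC): attach a REG_EQUAL note in the
   way described above -- emit a single-set insn and record that its
   destination is known to equal EQUAL_VALUE.  set_unique_reg_note returns
   NULL_RTX (and adds nothing) if the insn has no suitable SET or the
   value has side effects.  The helper name is hypothetical.  */

static rtx_insn *
emit_move_with_equal_note_sketch (rtx target, rtx src, rtx equal_value)
{
  rtx_insn *insn = emit_insn (gen_rtx_SET (target, src));
  set_unique_reg_note (insn, REG_EQUAL, equal_value);
  return insn;
}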
5432 /* Emit the rtl pattern X as an appropriate kind of insn. Also emit a
5433 following barrier if the instruction needs one and if ALLOW_BARRIER_P
5434 is true.
5436 If X is a label, it is simply added into the insn chain. */
5438 rtx_insn *
5439 emit (rtx x, bool allow_barrier_p)
5441 enum rtx_code code = classify_insn (x);
5443 switch (code)
5445 case CODE_LABEL:
5446 return emit_label (x);
5447 case INSN:
5448 return emit_insn (x);
5449 case JUMP_INSN:
5451 rtx_insn *insn = emit_jump_insn (x);
5452 if (allow_barrier_p
5453 && (any_uncondjump_p (insn) || GET_CODE (x) == RETURN))
5454 return emit_barrier ();
5455 return insn;
5457 case CALL_INSN:
5458 return emit_call_insn (x);
5459 case DEBUG_INSN:
5460 return emit_debug_insn (x);
5461 default:
5462 gcc_unreachable ();
5466 /* Space for free sequence stack entries. */
5467 static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
5469 /* Begin emitting insns to a sequence. If this sequence will contain
5470 something that might cause the compiler to pop arguments to function
5471 calls (because those pops have previously been deferred; see
5472 INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
5473 before calling this function. That will ensure that the deferred
5474 pops are not accidentally emitted in the middle of this sequence. */
5476 void
5477 start_sequence (void)
5479 struct sequence_stack *tem;
5481 if (free_sequence_stack != NULL)
5483 tem = free_sequence_stack;
5484 free_sequence_stack = tem->next;
5486 else
5487 tem = ggc_alloc<sequence_stack> ();
5489 tem->next = get_current_sequence ()->next;
5490 tem->first = get_insns ();
5491 tem->last = get_last_insn ();
5492 get_current_sequence ()->next = tem;
5494 set_first_insn (0);
5495 set_last_insn (0);
5498 /* Set up the insn chain starting with FIRST as the current sequence,
5499 saving the previously current one. See the documentation for
5500 start_sequence for more information about how to use this function. */
5502 void
5503 push_to_sequence (rtx_insn *first)
5505 rtx_insn *last;
5507 start_sequence ();
5509 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last))
5512 set_first_insn (first);
5513 set_last_insn (last);
5516 /* Like push_to_sequence, but take the last insn as an argument to avoid
5517 looping through the list. */
5519 void
5520 push_to_sequence2 (rtx_insn *first, rtx_insn *last)
5522 start_sequence ();
5524 set_first_insn (first);
5525 set_last_insn (last);
5528 /* Set up the outer-level insn chain
5529 as the current sequence, saving the previously current one. */
5531 void
5532 push_topmost_sequence (void)
5534 struct sequence_stack *top;
5536 start_sequence ();
5538 top = get_topmost_sequence ();
5539 set_first_insn (top->first);
5540 set_last_insn (top->last);
5543 /* After emitting to the outer-level insn chain, update the outer-level
5544 insn chain, and restore the previous saved state. */
5546 void
5547 pop_topmost_sequence (void)
5549 struct sequence_stack *top;
5551 top = get_topmost_sequence ();
5552 top->first = get_insns ();
5553 top->last = get_last_insn ();
5555 end_sequence ();
5558 /* After emitting to a sequence, restore previous saved state.
5560 To get the contents of the sequence just made, you must call
5561 `get_insns' *before* calling here.
5563 If the compiler might have deferred popping arguments while
5564 generating this sequence, and this sequence will not be immediately
5565 inserted into the instruction stream, use do_pending_stack_adjust
5566 before calling get_insns. That will ensure that the deferred
5567 pops are inserted into this sequence, and not into some random
5568 location in the instruction stream. See INHIBIT_DEFER_POP for more
5569 information about deferred popping of arguments. */
5571 void
5572 end_sequence (void)
5574 struct sequence_stack *tem = get_current_sequence ()->next;
5576 set_first_insn (tem->first);
5577 set_last_insn (tem->last);
5578 get_current_sequence ()->next = tem->next;
5580 memset (tem, 0, sizeof (*tem));
5581 tem->next = free_sequence_stack;
5582 free_sequence_stack = tem;
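/* Illustrative sketch (not part of GCC): collecting a detached sequence
   while honoring the deferred-pop rule described above.  Because the
   sequence is not inserted immediately, do_pending_stack_adjust (declared
   elsewhere in GCC) is called before get_insns so any deferred pops land
   inside the sequence.  The helper name is hypothetical.  */

static rtx_insn *
collect_move_sequence_sketch (rtx dest, rtx src)
{
  start_sequence ();
  emit_insn (gen_rtx_SET (dest, src));
  do_pending_stack_adjust ();
  rtx_insn *insns = get_insns ();
  end_sequence ();
  return insns;
}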
5585 /* Return 1 if currently emitting into a sequence. */
5588 in_sequence_p (void)
5590 return get_current_sequence ()->next != 0;
5593 /* Put the various virtual registers into REGNO_REG_RTX. */
5595 static void
5596 init_virtual_regs (void)
5598 regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
5599 regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
5600 regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
5601 regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
5602 regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
5603 regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
5604 = virtual_preferred_stack_boundary_rtx;
5608 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
5609 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
5610 static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
5611 static int copy_insn_n_scratches;
5613 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5614 copied an ASM_OPERANDS.
5615 In that case, it is the original input-operand vector. */
5616 static rtvec orig_asm_operands_vector;
5618 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5619 copied an ASM_OPERANDS.
5620 In that case, it is the copied input-operand vector. */
5621 static rtvec copy_asm_operands_vector;
5623 /* Likewise for the constraints vector. */
5624 static rtvec orig_asm_constraints_vector;
5625 static rtvec copy_asm_constraints_vector;
5627 /* Recursively create a new copy of an rtx for copy_insn.
5628 This function differs from copy_rtx in that it handles SCRATCHes and
5629 ASM_OPERANDs properly.
5630 Normally, this function is not used directly; use copy_insn as front end.
5631 However, you could first copy an insn pattern with copy_insn and then use
5632 this function afterwards to properly copy any REG_NOTEs containing
5633 SCRATCHes. */
5636 copy_insn_1 (rtx orig)
5638 rtx copy;
5639 int i, j;
5640 RTX_CODE code;
5641 const char *format_ptr;
5643 if (orig == NULL)
5644 return NULL;
5646 code = GET_CODE (orig);
5648 switch (code)
5650 case REG:
5651 case DEBUG_EXPR:
5652 CASE_CONST_ANY:
5653 case SYMBOL_REF:
5654 case CODE_LABEL:
5655 case PC:
5656 case RETURN:
5657 case SIMPLE_RETURN:
5658 return orig;
5659 case CLOBBER:
5660 /* Share clobbers of hard registers, but do not share pseudo reg
5661 clobbers or clobbers of hard registers that originated as pseudos.
5662 This is needed to allow safe register renaming. */
5663 if (REG_P (XEXP (orig, 0))
5664 && HARD_REGISTER_NUM_P (REGNO (XEXP (orig, 0)))
5665 && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (orig, 0))))
5666 return orig;
5667 break;
5669 case SCRATCH:
5670 for (i = 0; i < copy_insn_n_scratches; i++)
5671 if (copy_insn_scratch_in[i] == orig)
5672 return copy_insn_scratch_out[i];
5673 break;
5675 case CONST:
5676 if (shared_const_p (orig))
5677 return orig;
5678 break;
5680 /* A MEM with a constant address is not sharable. The problem is that
5681 the constant address may need to be reloaded. If the mem is shared,
5682 then reloading one copy of this mem will cause all copies to appear
5683 to have been reloaded. */
5685 default:
5686 break;
5689 /* Copy the various flags, fields, and other information. We assume
5690 that all fields need copying, and then clear the fields that should
5691 not be copied. That is the sensible default behavior, and forces
5692 us to explicitly document why we are *not* copying a flag. */
5693 copy = shallow_copy_rtx (orig);
5695 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
5696 if (INSN_P (orig))
5698 RTX_FLAG (copy, jump) = 0;
5699 RTX_FLAG (copy, call) = 0;
5700 RTX_FLAG (copy, frame_related) = 0;
5703 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
5705 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
5706 switch (*format_ptr++)
5708 case 'e':
5709 if (XEXP (orig, i) != NULL)
5710 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
5711 break;
5713 case 'E':
5714 case 'V':
5715 if (XVEC (orig, i) == orig_asm_constraints_vector)
5716 XVEC (copy, i) = copy_asm_constraints_vector;
5717 else if (XVEC (orig, i) == orig_asm_operands_vector)
5718 XVEC (copy, i) = copy_asm_operands_vector;
5719 else if (XVEC (orig, i) != NULL)
5721 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
5722 for (j = 0; j < XVECLEN (copy, i); j++)
5723 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
5725 break;
5727 case 't':
5728 case 'w':
5729 case 'i':
5730 case 'p':
5731 case 's':
5732 case 'S':
5733 case 'u':
5734 case '0':
5735 /* These are left unchanged. */
5736 break;
5738 default:
5739 gcc_unreachable ();
5742 if (code == SCRATCH)
5744 i = copy_insn_n_scratches++;
5745 gcc_assert (i < MAX_RECOG_OPERANDS);
5746 copy_insn_scratch_in[i] = orig;
5747 copy_insn_scratch_out[i] = copy;
5749 else if (code == ASM_OPERANDS)
5751 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
5752 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
5753 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
5754 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
5757 return copy;
5760 /* Create a new copy of an rtx.
5761 This function differs from copy_rtx in that it handles SCRATCHes and
5762 ASM_OPERANDs properly.
5763 INSN doesn't really have to be a full INSN; it could be just the
5764 pattern. */
5766 copy_insn (rtx insn)
5768 copy_insn_n_scratches = 0;
5769 orig_asm_operands_vector = 0;
5770 orig_asm_constraints_vector = 0;
5771 copy_asm_operands_vector = 0;
5772 copy_asm_constraints_vector = 0;
5773 return copy_insn_1 (insn);
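/* Illustrative sketch (not part of GCC): the two-step usage mentioned in
   the comment above copy_insn_1 -- copy an insn's pattern with copy_insn,
   then run copy_insn_1 over the REG_NOTES so that any SCRATCHes are
   mapped consistently with the pattern copy.  The helper name is
   hypothetical.  */

static void
copy_pattern_and_notes_sketch (rtx_insn *insn, rtx *pat_out, rtx *notes_out)
{
  *pat_out = copy_insn (PATTERN (insn));
  *notes_out = copy_insn_1 (REG_NOTES (insn));
}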
5776 /* Return a copy of INSN that can be used in a SEQUENCE delay slot,
5777 on the assumption that INSN itself remains in its original place. */
5779 rtx_insn *
5780 copy_delay_slot_insn (rtx_insn *insn)
5782 /* Copy INSN with its rtx_code, all its notes, location etc. */
5783 insn = as_a <rtx_insn *> (copy_rtx (insn));
5784 INSN_UID (insn) = cur_insn_uid++;
5785 return insn;
5788 /* Initialize data structures and variables in this file
5789 before generating rtl for each function. */
5791 void
5792 init_emit (void)
5794 set_first_insn (NULL);
5795 set_last_insn (NULL);
5796 if (param_min_nondebug_insn_uid)
5797 cur_insn_uid = param_min_nondebug_insn_uid;
5798 else
5799 cur_insn_uid = 1;
5800 cur_debug_insn_uid = 1;
5801 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
5802 first_label_num = label_num;
5803 get_current_sequence ()->next = NULL;
5805 /* Init the tables that describe all the pseudo regs. */
5807 crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 1;
5809 crtl->emit.regno_pointer_align
5810 = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);
5812 regno_reg_rtx
5813 = ggc_cleared_vec_alloc<rtx> (crtl->emit.regno_pointer_align_length);
5815 /* Put copies of all the hard registers into regno_reg_rtx. */
5816 memcpy (regno_reg_rtx,
5817 initial_regno_reg_rtx,
5818 FIRST_PSEUDO_REGISTER * sizeof (rtx));
5820 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
5821 init_virtual_regs ();
5823 /* Indicate that the virtual registers and stack locations are
5824 all pointers. */
5825 REG_POINTER (stack_pointer_rtx) = 1;
5826 REG_POINTER (frame_pointer_rtx) = 1;
5827 REG_POINTER (hard_frame_pointer_rtx) = 1;
5828 REG_POINTER (arg_pointer_rtx) = 1;
5830 REG_POINTER (virtual_incoming_args_rtx) = 1;
5831 REG_POINTER (virtual_stack_vars_rtx) = 1;
5832 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5833 REG_POINTER (virtual_outgoing_args_rtx) = 1;
5834 REG_POINTER (virtual_cfa_rtx) = 1;
5836 #ifdef STACK_BOUNDARY
5837 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5838 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5839 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5840 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5842 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5843 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5844 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5845 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5847 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
5848 #endif
5850 #ifdef INIT_EXPANDERS
5851 INIT_EXPANDERS;
5852 #endif
5855 /* Return the value of element I of CONST_VECTOR X as a wide_int. */
5857 wide_int
5858 const_vector_int_elt (const_rtx x, unsigned int i)
5860 /* First handle elements that are directly encoded. */
5861 machine_mode elt_mode = GET_MODE_INNER (GET_MODE (x));
5862 if (i < (unsigned int) XVECLEN (x, 0))
5863 return rtx_mode_t (CONST_VECTOR_ENCODED_ELT (x, i), elt_mode);
5865 /* Identify the pattern that contains element I and work out the index of
5866 the last encoded element for that pattern. */
5867 unsigned int encoded_nelts = const_vector_encoded_nelts (x);
5868 unsigned int npatterns = CONST_VECTOR_NPATTERNS (x);
5869 unsigned int count = i / npatterns;
5870 unsigned int pattern = i % npatterns;
5871 unsigned int final_i = encoded_nelts - npatterns + pattern;
5873 /* If there are no steps, the final encoded value is the right one. */
5874 if (!CONST_VECTOR_STEPPED_P (x))
5875 return rtx_mode_t (CONST_VECTOR_ENCODED_ELT (x, final_i), elt_mode);
5877 /* Otherwise work out the value from the last two encoded elements. */
5878 rtx v1 = CONST_VECTOR_ENCODED_ELT (x, final_i - npatterns);
5879 rtx v2 = CONST_VECTOR_ENCODED_ELT (x, final_i);
5880 wide_int diff = wi::sub (rtx_mode_t (v2, elt_mode),
5881 rtx_mode_t (v1, elt_mode));
5882 return wi::add (rtx_mode_t (v2, elt_mode), (count - 2) * diff);
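/* Worked example of the extrapolation above, as a self-contained sketch
   (not part of GCC) over plain integers instead of wide_ints.  ENCODED,
   NPATTERNS and ENCODED_NELTS describe a stepped encoding; e.g. with
   npatterns == 1 and encoded == {1, 4, 7}, element 5 is
   7 + (5 - 2) * 3 == 16.  */

static long
stepped_encoding_elt_sketch (const long *encoded, unsigned int npatterns,
			     unsigned int encoded_nelts, unsigned int i)
{
  if (i < encoded_nelts)
    return encoded[i];
  unsigned int count = i / npatterns;
  unsigned int pattern = i % npatterns;
  unsigned int final_i = encoded_nelts - npatterns + pattern;
  long step = encoded[final_i] - encoded[final_i - npatterns];
  return encoded[final_i] + (long) (count - 2) * step;
}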
5885 /* Return the value of element I of CONST_VECTOR X. */
5888 const_vector_elt (const_rtx x, unsigned int i)
5890 /* First handle elements that are directly encoded. */
5891 if (i < (unsigned int) XVECLEN (x, 0))
5892 return CONST_VECTOR_ENCODED_ELT (x, i);
5894 /* If there are no steps, the final encoded value is the right one. */
5895 if (!CONST_VECTOR_STEPPED_P (x))
5897 /* Identify the pattern that contains element I and work out the index of
5898 the last encoded element for that pattern. */
5899 unsigned int encoded_nelts = const_vector_encoded_nelts (x);
5900 unsigned int npatterns = CONST_VECTOR_NPATTERNS (x);
5901 unsigned int pattern = i % npatterns;
5902 unsigned int final_i = encoded_nelts - npatterns + pattern;
5903 return CONST_VECTOR_ENCODED_ELT (x, final_i);
5906 /* Otherwise work out the value from the last two encoded elements. */
5907 return immed_wide_int_const (const_vector_int_elt (x, i),
5908 GET_MODE_INNER (GET_MODE (x)));
5911 /* Return true if X is a valid element for a CONST_VECTOR of the given
5912 mode. */
5914 bool
5915 valid_for_const_vector_p (machine_mode, rtx x)
5917 return (CONST_SCALAR_INT_P (x)
5918 || CONST_POLY_INT_P (x)
5919 || CONST_DOUBLE_AS_FLOAT_P (x)
5920 || CONST_FIXED_P (x));
5923 /* Generate a vector constant of mode MODE in which every element has
5924 value ELT. */
5927 gen_const_vec_duplicate (machine_mode mode, rtx elt)
5929 rtx_vector_builder builder (mode, 1, 1);
5930 builder.quick_push (elt);
5931 return builder.build ();
5934 /* Return a vector rtx of mode MODE in which every element has value X.
5935 The result will be a constant if X is constant. */
5938 gen_vec_duplicate (machine_mode mode, rtx x)
5940 if (valid_for_const_vector_p (mode, x))
5941 return gen_const_vec_duplicate (mode, x);
5942 return gen_rtx_VEC_DUPLICATE (mode, x);
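/* Illustrative sketch (not part of GCC): an all-ones constant vector of
   MODE via gen_const_vec_duplicate, taking the per-element 1 from
   const_tiny_rtx through CONST1_RTX.  The helper name is hypothetical and
   MODE is assumed to be an integer vector mode.  */

static rtx
all_ones_vector_sketch (machine_mode mode)
{
  return gen_const_vec_duplicate (mode, CONST1_RTX (GET_MODE_INNER (mode)));
}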
5945 /* A subroutine of const_vec_series_p that handles the case in which:
5947 (GET_CODE (X) == CONST_VECTOR
5948 && CONST_VECTOR_NPATTERNS (X) == 1
5949 && !CONST_VECTOR_DUPLICATE_P (X))
5951 is known to hold. */
5953 bool
5954 const_vec_series_p_1 (const_rtx x, rtx *base_out, rtx *step_out)
5956 /* Stepped sequences are only defined for integers, to avoid specifying
5957 rounding behavior. */
5958 if (GET_MODE_CLASS (GET_MODE (x)) != MODE_VECTOR_INT)
5959 return false;
5961 /* A non-duplicated vector with two elements can always be seen as a
5962 series with a nonzero step. Longer vectors must have a stepped
5963 encoding. */
5964 if (maybe_ne (CONST_VECTOR_NUNITS (x), 2)
5965 && !CONST_VECTOR_STEPPED_P (x))
5966 return false;
5968 /* Calculate the step between the first and second elements. */
5969 scalar_mode inner = GET_MODE_INNER (GET_MODE (x));
5970 rtx base = CONST_VECTOR_ELT (x, 0);
5971 rtx step = simplify_binary_operation (MINUS, inner,
5972 CONST_VECTOR_ENCODED_ELT (x, 1), base);
5973 if (rtx_equal_p (step, CONST0_RTX (inner)))
5974 return false;
5976 /* If we have a stepped encoding, check that the step between the
5977 second and third elements is the same as STEP. */
5978 if (CONST_VECTOR_STEPPED_P (x))
5980 rtx diff = simplify_binary_operation (MINUS, inner,
5981 CONST_VECTOR_ENCODED_ELT (x, 2),
5982 CONST_VECTOR_ENCODED_ELT (x, 1));
5983 if (!rtx_equal_p (step, diff))
5984 return false;
5987 *base_out = base;
5988 *step_out = step;
5989 return true;
5992 /* Generate a vector constant of mode MODE in which element I has
5993 the value BASE + I * STEP. */
5996 gen_const_vec_series (machine_mode mode, rtx base, rtx step)
5998 gcc_assert (valid_for_const_vector_p (mode, base)
5999 && valid_for_const_vector_p (mode, step));
6001 rtx_vector_builder builder (mode, 1, 3);
6002 builder.quick_push (base);
6003 for (int i = 1; i < 3; ++i)
6004 builder.quick_push (simplify_gen_binary (PLUS, GET_MODE_INNER (mode),
6005 builder[i - 1], step));
6006 return builder.build ();
6009 /* Generate a vector of mode MODE in which element I has the value
6010 BASE + I * STEP. The result will be a constant if BASE and STEP
6011 are both constants. */
6014 gen_vec_series (machine_mode mode, rtx base, rtx step)
6016 if (step == const0_rtx)
6017 return gen_vec_duplicate (mode, base);
6018 if (valid_for_const_vector_p (mode, base)
6019 && valid_for_const_vector_p (mode, step))
6020 return gen_const_vec_series (mode, base, step);
6021 return gen_rtx_VEC_SERIES (mode, base, step);
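/* Illustrative sketch (not part of GCC): the constant series {0, 1, 2, 3, ...}
   in an integer vector mode, e.g. V4SImode where the target provides it.
   gen_const_vec_series encodes just the first three elements (base plus
   two steps); the rest follow from the stepped encoding.  The helper name
   is hypothetical.  */

static rtx
iota_vector_sketch (machine_mode int_vector_mode)
{
  return gen_const_vec_series (int_vector_mode, const0_rtx, const1_rtx);
}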
6024 /* Generate a new vector constant for mode MODE and constant value
6025 CONSTANT. */
6027 static rtx
6028 gen_const_vector (machine_mode mode, int constant)
6030 machine_mode inner = GET_MODE_INNER (mode);
6032 gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));
6034 rtx el = const_tiny_rtx[constant][(int) inner];
6035 gcc_assert (el);
6037 return gen_const_vec_duplicate (mode, el);
6040 /* Generate a vector like gen_rtx_raw_CONST_VEC, but use the zero vector when
6041 all elements are zero, and the one vector when all elements are one. */
6043 gen_rtx_CONST_VECTOR (machine_mode mode, rtvec v)
6045 gcc_assert (known_eq (GET_MODE_NUNITS (mode), GET_NUM_ELEM (v)));
6047 /* If the values are all the same, check to see if we can use one of the
6048 standard constant vectors. */
6049 if (rtvec_all_equal_p (v))
6050 return gen_const_vec_duplicate (mode, RTVEC_ELT (v, 0));
6052 unsigned int nunits = GET_NUM_ELEM (v);
6053 rtx_vector_builder builder (mode, nunits, 1);
6054 for (unsigned int i = 0; i < nunits; ++i)
6055 builder.quick_push (RTVEC_ELT (v, i));
6056 return builder.build (v);
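/* Illustrative sketch (not part of GCC): building a CONST_VECTOR from four
   element constants.  gen_rtvec (from rtl.h) packs them into an rtvec, and
   gen_rtx_CONST_VECTOR above canonicalizes the result, collapsing an
   all-equal vector into a shared duplicate.  The helper name is
   hypothetical; MODE and the elements are assumed to be compatible.  */

static rtx
const_vector_from_elts_sketch (machine_mode mode, rtx e0, rtx e1,
			       rtx e2, rtx e3)
{
  return gen_rtx_CONST_VECTOR (mode, gen_rtvec (4, e0, e1, e2, e3));
}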
6059 /* Initialize global register information required by all functions. */
6061 void
6062 init_emit_regs (void)
6064 int i;
6065 machine_mode mode;
6066 mem_attrs *attrs;
6068 /* Reset register attributes */
6069 reg_attrs_htab->empty ();
6071 /* We need reg_raw_mode, so initialize the modes now. */
6072 init_reg_modes_target ();
6074 /* Assign register numbers to the globally defined register rtx. */
6075 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
6076 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
6077 hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
6078 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
6079 virtual_incoming_args_rtx =
6080 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
6081 virtual_stack_vars_rtx =
6082 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
6083 virtual_stack_dynamic_rtx =
6084 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
6085 virtual_outgoing_args_rtx =
6086 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
6087 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
6088 virtual_preferred_stack_boundary_rtx =
6089 gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);
6091 /* Initialize RTL for commonly used hard registers. These are
6092 copied into regno_reg_rtx as we begin to compile each function. */
6093 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
6094 initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
6096 #ifdef RETURN_ADDRESS_POINTER_REGNUM
6097 return_address_pointer_rtx
6098 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
6099 #endif
6101 pic_offset_table_rtx = NULL_RTX;
6102 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
6103 pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
6105 /* Process stack-limiting command-line options. */
6106 if (opt_fstack_limit_symbol_arg != NULL)
6107 stack_limit_rtx
6108 = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (opt_fstack_limit_symbol_arg));
6109 if (opt_fstack_limit_register_no >= 0)
6110 stack_limit_rtx = gen_rtx_REG (Pmode, opt_fstack_limit_register_no);
6112 for (i = 0; i < (int) MAX_MACHINE_MODE; i++)
6114 mode = (machine_mode) i;
6115 attrs = ggc_cleared_alloc<mem_attrs> ();
6116 attrs->align = BITS_PER_UNIT;
6117 attrs->addrspace = ADDR_SPACE_GENERIC;
6118 if (mode != BLKmode && mode != VOIDmode)
6120 attrs->size_known_p = true;
6121 attrs->size = GET_MODE_SIZE (mode);
6122 if (STRICT_ALIGNMENT)
6123 attrs->align = GET_MODE_ALIGNMENT (mode);
6125 mode_mem_attrs[i] = attrs;
6128 split_branch_probability = profile_probability::uninitialized ();
6131 /* Initialize global machine_mode variables. */
6133 void
6134 init_derived_machine_modes (void)
6136 opt_scalar_int_mode mode_iter, opt_byte_mode, opt_word_mode;
6137 FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_INT)
6139 scalar_int_mode mode = mode_iter.require ();
6141 if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
6142 && !opt_byte_mode.exists ())
6143 opt_byte_mode = mode;
6145 if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
6146 && !opt_word_mode.exists ())
6147 opt_word_mode = mode;
6150 byte_mode = opt_byte_mode.require ();
6151 word_mode = opt_word_mode.require ();
6152 ptr_mode = as_a <scalar_int_mode>
6153 (mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0).require ());
6156 /* Create some permanent unique rtl objects shared between all functions. */
6158 void
6159 init_emit_once (void)
6161 int i;
6162 machine_mode mode;
6163 scalar_float_mode double_mode;
6164 opt_scalar_mode smode_iter;
6166 /* Initialize the CONST_INT, CONST_WIDE_INT, CONST_DOUBLE,
6167 CONST_FIXED, and memory attribute hash tables. */
6168 const_int_htab = hash_table<const_int_hasher>::create_ggc (37);
6170 #if TARGET_SUPPORTS_WIDE_INT
6171 const_wide_int_htab = hash_table<const_wide_int_hasher>::create_ggc (37);
6172 #endif
6173 const_double_htab = hash_table<const_double_hasher>::create_ggc (37);
6175 if (NUM_POLY_INT_COEFFS > 1)
6176 const_poly_int_htab = hash_table<const_poly_int_hasher>::create_ggc (37);
6178 const_fixed_htab = hash_table<const_fixed_hasher>::create_ggc (37);
6180 reg_attrs_htab = hash_table<reg_attr_hasher>::create_ggc (37);
6182 #ifdef INIT_EXPANDERS
6183 /* This initializes {init|mark|free}_machine_status before the first
6184 call to push_function_context_to. It is needed by the Chill front
6185 end, which calls push_function_context_to before the first call to
6186 init_function_start. */
6187 INIT_EXPANDERS;
6188 #endif
6190 /* Create the unique rtx's for certain rtx codes and operand values. */
6192 /* Don't use gen_rtx_CONST_INT here, since for these small values it would
6193 try to return the cached entries in const_int_rtx that we are creating. */
6194 for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
6195 const_int_rtx[i + MAX_SAVED_CONST_INT] =
6196 gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);
6198 if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
6199 && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
6200 const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
6201 else
6202 const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
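/* A minimal usage sketch (valid only once init_emit_once has finished):
   because the small CONST_INTs created above are shared, later requests for
   the same small value return the very same rtx object, e.g.

     rtx a = GEN_INT (2);
     rtx b = gen_int_mode (2, SImode);
     gcc_checking_assert (a == b && a == const2_rtx);

   which is why CONST_INTs in this range can be compared by pointer.  */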
6204 double_mode = float_mode_for_size (DOUBLE_TYPE_SIZE).require ();
6206 real_from_integer (&dconst0, double_mode, 0, SIGNED);
6207 real_from_integer (&dconst1, double_mode, 1, SIGNED);
6208 real_from_integer (&dconst2, double_mode, 2, SIGNED);
6210 dconstm0 = dconst0;
6211 dconstm0.sign = 1;
6213 dconstm1 = dconst1;
6214 dconstm1.sign = 1;
6216 dconsthalf = dconst1;
6217 SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);
6219 real_inf (&dconstinf);
6220 real_inf (&dconstninf, true);
6222 for (i = 0; i < 3; i++)
6224 const REAL_VALUE_TYPE *const r =
6225 (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
6227 FOR_EACH_MODE_IN_CLASS (mode, MODE_FLOAT)
6228 const_tiny_rtx[i][(int) mode] =
6229 const_double_from_real_value (*r, mode);
6231 FOR_EACH_MODE_IN_CLASS (mode, MODE_DECIMAL_FLOAT)
6232 const_tiny_rtx[i][(int) mode] =
6233 const_double_from_real_value (*r, mode);
6235 const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
6237 FOR_EACH_MODE_IN_CLASS (mode, MODE_INT)
6238 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
6240 for (mode = MIN_MODE_PARTIAL_INT;
6241 mode <= MAX_MODE_PARTIAL_INT;
6242 mode = (machine_mode)((int)(mode) + 1))
6243 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
6246 const_tiny_rtx[3][(int) VOIDmode] = constm1_rtx;
6248 FOR_EACH_MODE_IN_CLASS (mode, MODE_INT)
6249 const_tiny_rtx[3][(int) mode] = constm1_rtx;
6251 /* For BImode, 1 and -1 are unsigned and signed interpretations
6252 of the same value. */
6253 for (mode = MIN_MODE_BOOL;
6254 mode <= MAX_MODE_BOOL;
6255 mode = (machine_mode)((int)(mode) + 1))
6257 const_tiny_rtx[0][(int) mode] = const0_rtx;
6258 if (mode == BImode)
6260 const_tiny_rtx[1][(int) mode] = const_true_rtx;
6261 const_tiny_rtx[3][(int) mode] = const_true_rtx;
6263 else
6265 const_tiny_rtx[1][(int) mode] = const1_rtx;
6266 const_tiny_rtx[3][(int) mode] = constm1_rtx;
6270 for (mode = MIN_MODE_PARTIAL_INT;
6271 mode <= MAX_MODE_PARTIAL_INT;
6272 mode = (machine_mode)((int)(mode) + 1))
6273 const_tiny_rtx[3][(int) mode] = constm1_rtx;
6275 FOR_EACH_MODE_IN_CLASS (mode, MODE_COMPLEX_INT)
6277 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
6278 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
6281 FOR_EACH_MODE_IN_CLASS (mode, MODE_COMPLEX_FLOAT)
6283 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
6284 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
6287 FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_BOOL)
6289 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6290 const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
6291 if (GET_MODE_INNER (mode) == BImode)
6292 /* As for BImode, "all 1" and "all -1" are unsigned and signed
6293 interpretations of the same value. */
6294 const_tiny_rtx[1][(int) mode] = const_tiny_rtx[3][(int) mode];
6295 else
6296 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6299 FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_INT)
6301 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6302 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6303 const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
6306 FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_FLOAT)
6308 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6309 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6312 FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_FRACT)
6314 scalar_mode smode = smode_iter.require ();
6315 FCONST0 (smode).data.high = 0;
6316 FCONST0 (smode).data.low = 0;
6317 FCONST0 (smode).mode = smode;
6318 const_tiny_rtx[0][(int) smode]
6319 = CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);
6322 FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_UFRACT)
6324 scalar_mode smode = smode_iter.require ();
6325 FCONST0 (smode).data.high = 0;
6326 FCONST0 (smode).data.low = 0;
6327 FCONST0 (smode).mode = smode;
6328 const_tiny_rtx[0][(int) smode]
6329 = CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);
6332 FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_ACCUM)
6334 scalar_mode smode = smode_iter.require ();
6335 FCONST0 (smode).data.high = 0;
6336 FCONST0 (smode).data.low = 0;
6337 FCONST0 (smode).mode = smode;
6338 const_tiny_rtx[0][(int) smode]
6339 = CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);
6341 /* We store the value 1. */
6342 FCONST1 (smode).data.high = 0;
6343 FCONST1 (smode).data.low = 0;
6344 FCONST1 (smode).mode = smode;
6345 FCONST1 (smode).data
6346 = double_int_one.lshift (GET_MODE_FBIT (smode),
6347 HOST_BITS_PER_DOUBLE_INT,
6348 SIGNED_FIXED_POINT_MODE_P (smode));
6349 const_tiny_rtx[1][(int) smode]
6350 = CONST_FIXED_FROM_FIXED_VALUE (FCONST1 (smode), smode);
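/* A worked example of the shift above, assuming a signed accumulator mode
   with 15 fractional bits: the value 1.0 is encoded as the integer
   1 << 15 = 0x8000, so the constant stored in const_tiny_rtx[1] is the
   fixed-point encoding of 1.0 rather than the raw integer 1.  */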
6353 FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_UACCUM)
6355 scalar_mode smode = smode_iter.require ();
6356 FCONST0 (smode).data.high = 0;
6357 FCONST0 (smode).data.low = 0;
6358 FCONST0 (smode).mode = smode;
6359 const_tiny_rtx[0][(int) smode]
6360 = CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);
6362 /* We store the value 1. */
6363 FCONST1 (smode).data.high = 0;
6364 FCONST1 (smode).data.low = 0;
6365 FCONST1 (smode).mode = smode;
6366 FCONST1 (smode).data
6367 = double_int_one.lshift (GET_MODE_FBIT (smode),
6368 HOST_BITS_PER_DOUBLE_INT,
6369 SIGNED_FIXED_POINT_MODE_P (smode));
6370 const_tiny_rtx[1][(int) smode]
6371 = CONST_FIXED_FROM_FIXED_VALUE (FCONST1 (smode), smode);
6374 FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_FRACT)
6376 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6379 FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_UFRACT)
6381 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6384 FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_ACCUM)
6386 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6387 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6390 FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_UACCUM)
6392 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6393 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6396 for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
6397 if (GET_MODE_CLASS ((machine_mode) i) == MODE_CC)
6398 const_tiny_rtx[0][i] = const0_rtx;
6400 pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
6401 ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
6402 simple_return_rtx = gen_rtx_fmt_ (SIMPLE_RETURN, VOIDmode);
6403 invalid_insn_rtx = gen_rtx_INSN (VOIDmode,
6404 /*prev_insn=*/NULL,
6405 /*next_insn=*/NULL,
6406 /*bb=*/NULL,
6407 /*pattern=*/NULL_RTX,
6408 /*location=*/-1,
6409 CODE_FOR_nothing,
6410 /*reg_notes=*/NULL_RTX);
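/* A minimal sketch of how the const_tiny_rtx table built above is consumed
   elsewhere, via the accessor macros in rtl.h (shown with arbitrary example
   modes; V4SImode exists only on targets that define such a vector mode):

     rtx zero = CONST0_RTX (DFmode);     - the 0.0 CONST_DOUBLE built above
     rtx one  = CONST1_RTX (SImode);     - const1_rtx
     rtx vec0 = CONST0_RTX (V4SImode);   - the all-zero vector built above

   Each macro simply indexes const_tiny_rtx, so the returned objects are
   shared and may be compared by pointer.  */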
6413 /* Produce an exact duplicate of insn INSN after AFTER, taking care to
6414 update any libcall regions if they are present. */
6416 rtx_insn *
6417 emit_copy_of_insn_after (rtx_insn *insn, rtx_insn *after)
6419 rtx_insn *new_rtx;
6420 rtx link;
6422 switch (GET_CODE (insn))
6424 case INSN:
6425 new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
6426 break;
6428 case JUMP_INSN:
6429 new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
6430 CROSSING_JUMP_P (new_rtx) = CROSSING_JUMP_P (insn);
6431 break;
6433 case DEBUG_INSN:
6434 new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
6435 break;
6437 case CALL_INSN:
6438 new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
6439 if (CALL_INSN_FUNCTION_USAGE (insn))
6440 CALL_INSN_FUNCTION_USAGE (new_rtx)
6441 = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
6442 SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
6443 RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
6444 RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
6445 RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
6446 = RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
6447 break;
6449 default:
6450 gcc_unreachable ();
6453 /* Update LABEL_NUSES. */
6454 if (NONDEBUG_INSN_P (insn))
6455 mark_jump_label (PATTERN (new_rtx), new_rtx, 0);
6457 INSN_LOCATION (new_rtx) = INSN_LOCATION (insn);
6459 /* If the old insn is frame related, then so is the new one. This is
6460 primarily needed for IA-64 unwind info which marks epilogue insns,
6461 which may be duplicated by the basic block reordering code. */
6462 RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);
6464 /* Locate the end of existing REG_NOTES in NEW_RTX. */
6465 rtx *ptail = &REG_NOTES (new_rtx);
6466 while (*ptail != NULL_RTX)
6467 ptail = &XEXP (*ptail, 1);
6469 /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
6470 will make them. REG_LABEL_TARGETs are created there too, but are
6471 supposed to be sticky, so we copy them. */
6472 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
6473 if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
6475 *ptail = duplicate_reg_note (link);
6476 ptail = &XEXP (*ptail, 1);
6479 INSN_CODE (new_rtx) = INSN_CODE (insn);
6480 return new_rtx;
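/* A minimal usage sketch (hypothetical caller): a pass that wants to
   duplicate an existing insn elsewhere in the stream can simply do

     rtx_insn *copy = emit_copy_of_insn_after (insn, after);

   and the copy comes back with the pattern, the REG_NOTES (other than
   REG_LABEL_OPERAND), the location, INSN_CODE and the frame-related flag of
   the original already filled in, as implemented above.  */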
6483 static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
6484 rtx
6485 gen_hard_reg_clobber (machine_mode mode, unsigned int regno)
6487 if (hard_reg_clobbers[mode][regno])
6488 return hard_reg_clobbers[mode][regno];
6489 else
6490 return (hard_reg_clobbers[mode][regno] =
6491 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
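/* A minimal usage sketch, with MODE and REGNO standing for a hypothetical
   machine mode and hard register number: code that needs a
   (clobber (reg ...)) element, e.g. to put into a PARALLEL, can use

     rtx clob = gen_hard_reg_clobber (MODE, REGNO);

   Repeated calls with the same arguments return the same cached rtx; the
   GTY((deletable)) marker above merely allows the garbage collector to drop
   the cache, which is then rebuilt on demand.  */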
6494 location_t prologue_location;
6495 location_t epilogue_location;
6497 /* Hold the current location information and the last location information,
6498 so that the data structures are built lazily only when instructions at a
6499 given place are actually needed. */
6500 static location_t curr_location;
6502 /* Allocate the insn location data structure. */
6503 void
6504 insn_locations_init (void)
6506 prologue_location = epilogue_location = 0;
6507 curr_location = UNKNOWN_LOCATION;
6510 /* At the end of the emit stage, clear the current location. */
6511 void
6512 insn_locations_finalize (void)
6514 epilogue_location = curr_location;
6515 curr_location = UNKNOWN_LOCATION;
6518 /* Set current location. */
6519 void
6520 set_curr_insn_location (location_t location)
6522 curr_location = location;
6525 /* Get current location. */
6526 location_t
6527 curr_insn_location (void)
6529 return curr_location;
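/* A minimal sketch of the intended use (hypothetical caller; the real call
   sites live in the expander): insn_locations_init and
   insn_locations_finalize bracket the whole emit stage, and while expanding
   an individual statement one does

     set_curr_insn_location (gimple_location (stmt));
     ... emit insns for stmt; each new insn picks up curr_location ...

   so the emitted insns carry the location of the statement they came from.  */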
6532 /* Set the location of the insn chain starting at INSN to LOC. */
6533 void
6534 set_insn_locations (rtx_insn *insn, location_t loc)
6536 while (insn)
6538 if (INSN_P (insn))
6539 INSN_LOCATION (insn) = loc;
6540 insn = NEXT_INSN (insn);
6544 /* Return the lexical scope block that INSN belongs to. */
6545 tree
6546 insn_scope (const rtx_insn *insn)
6548 return LOCATION_BLOCK (INSN_LOCATION (insn));
6551 /* Return the line number of the statement that produced this insn. */
6552 int
6553 insn_line (const rtx_insn *insn)
6555 return LOCATION_LINE (INSN_LOCATION (insn));
6558 /* Return source file of the statement that produced this insn. */
6559 const char *
6560 insn_file (const rtx_insn *insn)
6562 return LOCATION_FILE (INSN_LOCATION (insn));
6565 /* Return expanded location of the statement that produced this insn. */
6566 expanded_location
6567 insn_location (const rtx_insn *insn)
6569 return expand_location (INSN_LOCATION (insn));
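/* A minimal usage sketch (hypothetical dump code, assuming dump_file is
   open and the insn has a known location):

     expanded_location xloc = insn_location (insn);
     fprintf (dump_file, "%s:%d\n", xloc.file, xloc.line);

   which is equivalent to combining insn_file and insn_line, since all three
   accessors just decode INSN_LOCATION (insn).  */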
6572 /* Return true if memory model MODEL requires a pre-operation (release-style)
6573 barrier when PRE is true, or a post-operation (acquire-style) barrier when
6574 PRE is false. While not universal, this matches the behavior of several targets. */
6576 bool
6577 need_atomic_barrier_p (enum memmodel model, bool pre)
6579 switch (model & MEMMODEL_BASE_MASK)
6581 case MEMMODEL_RELAXED:
6582 case MEMMODEL_CONSUME:
6583 return false;
6584 case MEMMODEL_RELEASE:
6585 return pre;
6586 case MEMMODEL_ACQUIRE:
6587 return !pre;
6588 case MEMMODEL_ACQ_REL:
6589 case MEMMODEL_SEQ_CST:
6590 return true;
6591 default:
6592 gcc_unreachable ();
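/* A minimal sketch of the intended use (hypothetical target code; real
   backends differ in detail): when expanding an atomic operation that does
   not order memory by itself, bracket it with explicit fences:

     if (need_atomic_barrier_p (model, true))
       expand_mem_thread_fence (model);
     ... emit the bare atomic operation ...
     if (need_atomic_barrier_p (model, false))
       expand_mem_thread_fence (model);

   so MEMMODEL_RELEASE gets only the pre-barrier, MEMMODEL_ACQUIRE only the
   post-barrier, and MEMMODEL_ACQ_REL/MEMMODEL_SEQ_CST get both.  */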
6596 /* Return a constant shift amount for shifting a value of mode MODE
6597 by VALUE bits. */
6599 rtx
6600 gen_int_shift_amount (machine_mode, poly_int64 value)
6602 /* Use a 64-bit mode, to avoid any truncation.
6604 ??? Perhaps this should be automatically derived from the .md files
6605 instead, or perhaps have a target hook. */
6606 scalar_int_mode shift_mode = (BITS_PER_UNIT == 8
6607 ? DImode
6608 : int_mode_for_size (64, 0).require ());
6609 return gen_int_mode (value, shift_mode);
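/* A minimal usage sketch (hypothetical caller, with op0 some existing SImode
   rtx): to build a constant shift without worrying about truncation of the
   amount, use

     rtx amount = gen_int_shift_amount (SImode, 3);
     rtx shift  = gen_rtx_ASHIFT (SImode, op0, amount);

   instead of picking a mode for the amount by hand; as noted above, the
   helper uses a 64-bit mode precisely to avoid truncation.  */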
6612 /* Initialize fields of rtl_data related to stack alignment. */
6614 void
6615 rtl_data::init_stack_alignment ()
6617 stack_alignment_needed = STACK_BOUNDARY;
6618 max_used_stack_slot_alignment = STACK_BOUNDARY;
6619 stack_alignment_estimated = 0;
6620 preferred_stack_boundary = STACK_BOUNDARY;
6624 #include "gt-emit-rtl.h"