gcc/emit-rtl.cc
/* Emit RTL for the GCC expander.
   Copyright (C) 1987-2023 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


/* Middle-to-low level generation of rtx code and insns.

   This file contains support functions for creating rtl expressions
   and manipulating them in the doubly-linked chain of insns.

   The patterns of the insns are created by machine-dependent
   routines in insn-emit.cc, which is generated automatically from
   the machine description.  These routines make the individual rtx's
   of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
   which are automatically generated from rtl.def; what is machine
   dependent is the kind of rtx's they make and what arguments they
   use.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "memmodel.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "df.h"
#include "tm_p.h"
#include "stringpool.h"
#include "insn-config.h"
#include "regs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "varasm.h"
#include "cfgrtl.h"
#include "tree-eh.h"
#include "explow.h"
#include "expr.h"
#include "builtins.h"
#include "rtl-iter.h"
#include "stor-layout.h"
#include "opts.h"
#include "predict.h"
#include "rtx-vector-builder.h"
#include "gimple.h"
#include "gimple-ssa.h"
#include "gimplify.h"

struct target_rtl default_target_rtl;
#if SWITCHABLE_TARGET
struct target_rtl *this_target_rtl = &default_target_rtl;
#endif

#define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)

/* Commonly used modes.  */

scalar_int_mode byte_mode;	/* Mode whose width is BITS_PER_UNIT.  */
scalar_int_mode word_mode;	/* Mode whose width is BITS_PER_WORD.  */
scalar_int_mode ptr_mode;	/* Mode whose width is POINTER_SIZE.  */

/* Datastructures maintained for currently processed function in RTL form.  */

struct rtl_data x_rtl;

/* Indexed by pseudo register number, gives the rtx for that pseudo.
   Allocated in parallel with regno_pointer_align.
   FIXME: We could put it into the emit_status struct, but gengtype is not
   able to deal with a length attribute nested in top-level structures.  */

rtx * regno_reg_rtx;

/* This is *not* reset after each function.  It gives each CODE_LABEL
   in the entire compilation a unique label number.  */

static GTY(()) int label_num = 1;

/* We record floating-point CONST_DOUBLEs in each floating-point mode for
   the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
   record a copy of const[012]_rtx and constm1_rtx.  CONSTM1_RTX
   is set only for MODE_INT and MODE_VECTOR_INT modes.  */

rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE];

rtx const_true_rtx;

REAL_VALUE_TYPE dconst0;
REAL_VALUE_TYPE dconst1;
REAL_VALUE_TYPE dconst2;
REAL_VALUE_TYPE dconstm0;
REAL_VALUE_TYPE dconstm1;
REAL_VALUE_TYPE dconsthalf;
REAL_VALUE_TYPE dconstinf;
REAL_VALUE_TYPE dconstninf;

/* Record fixed-point constant 0 and 1.  */
FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
FIXED_VALUE_TYPE fconst1[MAX_FCONST1];

/* We make one copy of (const_int C) where C is in
   [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
   to save space during the compilation and simplify comparisons of
   integers.  */

rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
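
/* An illustrative consequence of this sharing (a sketch; GEN_INT is the
   usual access path):

     rtx a = GEN_INT (0);
     rtx b = gen_rtx_CONST_INT (VOIDmode, 0);
     gcc_checking_assert (a == b && a == const0_rtx);

   Pointer equality is therefore a valid test for small CONST_INTs.  */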

/* Standard pieces of rtx, to be substituted directly into things.  */
rtx pc_rtx;
rtx ret_rtx;
rtx simple_return_rtx;

/* Marker used for denoting an INSN, which should never be accessed (i.e.,
   this pointer should normally never be dereferenced), but is required to be
   distinct from NULL_RTX.  Currently used by peephole2 pass.  */
rtx_insn *invalid_insn_rtx;

/* A hash table storing CONST_INTs whose absolute value is greater
   than MAX_SAVED_CONST_INT.  */

struct const_int_hasher : ggc_cache_ptr_hash<rtx_def>
{
  typedef HOST_WIDE_INT compare_type;

  static hashval_t hash (rtx i);
  static bool equal (rtx i, HOST_WIDE_INT h);
};

static GTY ((cache)) hash_table<const_int_hasher> *const_int_htab;

struct const_wide_int_hasher : ggc_cache_ptr_hash<rtx_def>
{
  static hashval_t hash (rtx x);
  static bool equal (rtx x, rtx y);
};

static GTY ((cache)) hash_table<const_wide_int_hasher> *const_wide_int_htab;

struct const_poly_int_hasher : ggc_cache_ptr_hash<rtx_def>
{
  typedef std::pair<machine_mode, poly_wide_int_ref> compare_type;

  static hashval_t hash (rtx x);
  static bool equal (rtx x, const compare_type &y);
};

static GTY ((cache)) hash_table<const_poly_int_hasher> *const_poly_int_htab;

/* A hash table storing register attribute structures.  */
struct reg_attr_hasher : ggc_cache_ptr_hash<reg_attrs>
{
  static hashval_t hash (reg_attrs *x);
  static bool equal (reg_attrs *a, reg_attrs *b);
};

static GTY ((cache)) hash_table<reg_attr_hasher> *reg_attrs_htab;

/* A hash table storing all CONST_DOUBLEs.  */
struct const_double_hasher : ggc_cache_ptr_hash<rtx_def>
{
  static hashval_t hash (rtx x);
  static bool equal (rtx x, rtx y);
};

static GTY ((cache)) hash_table<const_double_hasher> *const_double_htab;

/* A hash table storing all CONST_FIXEDs.  */
struct const_fixed_hasher : ggc_cache_ptr_hash<rtx_def>
{
  static hashval_t hash (rtx x);
  static bool equal (rtx x, rtx y);
};

static GTY ((cache)) hash_table<const_fixed_hasher> *const_fixed_htab;

#define cur_insn_uid (crtl->emit.x_cur_insn_uid)
#define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
#define first_label_num (crtl->emit.x_first_label_num)

static void set_used_decls (tree);
static void mark_label_nuses (rtx);
#if TARGET_SUPPORTS_WIDE_INT
static rtx lookup_const_wide_int (rtx);
#endif
static rtx lookup_const_double (rtx);
static rtx lookup_const_fixed (rtx);
static rtx gen_const_vector (machine_mode, int);
static void copy_rtx_if_shared_1 (rtx *orig);

/* Probability of the conditional branch currently processed by try_split.  */
profile_probability split_branch_probability;

/* Returns a hash code for X (which is really a CONST_INT).  */

hashval_t
const_int_hasher::hash (rtx x)
{
  return (hashval_t) INTVAL (x);
}

/* Returns true if the value represented by X (which is really a
   CONST_INT) is the same as that given by Y (which is really a
   HOST_WIDE_INT).  */

bool
const_int_hasher::equal (rtx x, HOST_WIDE_INT y)
{
  return (INTVAL (x) == y);
}

#if TARGET_SUPPORTS_WIDE_INT
/* Returns a hash code for X (which is really a CONST_WIDE_INT).  */

hashval_t
const_wide_int_hasher::hash (rtx x)
{
  int i;
  unsigned HOST_WIDE_INT hash = 0;
  const_rtx xr = x;

  for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
    hash += CONST_WIDE_INT_ELT (xr, i);

  return (hashval_t) hash;
}

/* Returns true if the value represented by X (which is really a
   CONST_WIDE_INT) is the same as that given by Y (which is really a
   CONST_WIDE_INT).  */

bool
const_wide_int_hasher::equal (rtx x, rtx y)
{
  int i;
  const_rtx xr = x;
  const_rtx yr = y;
  if (CONST_WIDE_INT_NUNITS (xr) != CONST_WIDE_INT_NUNITS (yr))
    return false;

  for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
    if (CONST_WIDE_INT_ELT (xr, i) != CONST_WIDE_INT_ELT (yr, i))
      return false;

  return true;
}
#endif

/* Returns a hash code for CONST_POLY_INT X.  */

hashval_t
const_poly_int_hasher::hash (rtx x)
{
  inchash::hash h;
  h.add_int (GET_MODE (x));
  for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
    h.add_wide_int (CONST_POLY_INT_COEFFS (x)[i]);
  return h.end ();
}

/* Returns true if CONST_POLY_INT X is an rtx representation of Y.  */

bool
const_poly_int_hasher::equal (rtx x, const compare_type &y)
{
  if (GET_MODE (x) != y.first)
    return false;
  for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
    if (CONST_POLY_INT_COEFFS (x)[i] != y.second.coeffs[i])
      return false;
  return true;
}

/* Returns a hash code for X (which is really a CONST_DOUBLE).  */
hashval_t
const_double_hasher::hash (rtx x)
{
  const_rtx const value = x;
  hashval_t h;

  if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (value) == VOIDmode)
    h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
  else
    {
      h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
      /* MODE is used in the comparison, so it should be in the hash.  */
      h ^= GET_MODE (value);
    }
  return h;
}

/* Returns true if the value represented by X (really a CONST_DOUBLE)
   is the same as that represented by Y (really a CONST_DOUBLE).  */
bool
const_double_hasher::equal (rtx x, rtx y)
{
  const_rtx const a = x, b = y;

  if (GET_MODE (a) != GET_MODE (b))
    return false;
  if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (a) == VOIDmode)
    return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
	    && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
  else
    return real_identical (CONST_DOUBLE_REAL_VALUE (a),
			   CONST_DOUBLE_REAL_VALUE (b));
}

/* Returns a hash code for X (which is really a CONST_FIXED).  */

hashval_t
const_fixed_hasher::hash (rtx x)
{
  const_rtx const value = x;
  hashval_t h;

  h = fixed_hash (CONST_FIXED_VALUE (value));
  /* MODE is used in the comparison, so it should be in the hash.  */
  h ^= GET_MODE (value);
  return h;
}

/* Returns true if the value represented by X is the same as that
   represented by Y.  */

bool
const_fixed_hasher::equal (rtx x, rtx y)
{
  const_rtx const a = x, b = y;

  if (GET_MODE (a) != GET_MODE (b))
    return false;
  return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
}

/* Return true if the given memory attributes are equal.  */

bool
mem_attrs_eq_p (const class mem_attrs *p, const class mem_attrs *q)
{
  if (p == q)
    return true;
  if (!p || !q)
    return false;
  return (p->alias == q->alias
	  && p->offset_known_p == q->offset_known_p
	  && (!p->offset_known_p || known_eq (p->offset, q->offset))
	  && p->size_known_p == q->size_known_p
	  && (!p->size_known_p || known_eq (p->size, q->size))
	  && p->align == q->align
	  && p->addrspace == q->addrspace
	  && (p->expr == q->expr
	      || (p->expr != NULL_TREE && q->expr != NULL_TREE
		  && operand_equal_p (p->expr, q->expr, 0))));
}

/* Set MEM's memory attributes so that they are the same as ATTRS.  */

static void
set_mem_attrs (rtx mem, mem_attrs *attrs)
{
  /* If everything is the default, we can just clear the attributes.  */
  if (mem_attrs_eq_p (attrs, mode_mem_attrs[(int) GET_MODE (mem)]))
    {
      MEM_ATTRS (mem) = 0;
      return;
    }

  if (!MEM_ATTRS (mem)
      || !mem_attrs_eq_p (attrs, MEM_ATTRS (mem)))
    {
      MEM_ATTRS (mem) = ggc_alloc<mem_attrs> ();
      memcpy (MEM_ATTRS (mem), attrs, sizeof (mem_attrs));
    }
}

/* Returns a hash code for X (which is really a reg_attrs *).  */

hashval_t
reg_attr_hasher::hash (reg_attrs *x)
{
  const reg_attrs *const p = x;

  inchash::hash h;
  h.add_ptr (p->decl);
  h.add_poly_hwi (p->offset);
  return h.end ();
}

/* Returns true if the value represented by X is the same as that given by
   Y.  */

bool
reg_attr_hasher::equal (reg_attrs *x, reg_attrs *y)
{
  const reg_attrs *const p = x;
  const reg_attrs *const q = y;

  return (p->decl == q->decl && known_eq (p->offset, q->offset));
}

/* Allocate a new reg_attrs structure for decl DECL and offset OFFSET,
   reusing an identical structure from the hash table if one is already
   there.  */

static reg_attrs *
get_reg_attrs (tree decl, poly_int64 offset)
{
  reg_attrs attrs;

  /* If everything is the default, we can just return zero.  */
  if (decl == 0 && known_eq (offset, 0))
    return 0;

  attrs.decl = decl;
  attrs.offset = offset;

  reg_attrs **slot = reg_attrs_htab->find_slot (&attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc<reg_attrs> ();
      memcpy (*slot, &attrs, sizeof (reg_attrs));
    }

  return *slot;
}

#if !HAVE_blockage
/* Generate an empty ASM_INPUT, which is used to block attempts to schedule,
   and to block register equivalences to be seen across this insn.  */

rtx
gen_blockage (void)
{
  rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
  MEM_VOLATILE_P (x) = true;
  return x;
}
#endif

/* Set the mode and register number of X to MODE and REGNO.  */

void
set_mode_and_regno (rtx x, machine_mode mode, unsigned int regno)
{
  unsigned int nregs = (HARD_REGISTER_NUM_P (regno)
			? hard_regno_nregs (regno, mode)
			: 1);
  PUT_MODE_RAW (x, mode);
  set_regno_raw (x, regno, nregs);
}

/* Initialize a fresh REG rtx with mode MODE and register REGNO.  */

rtx
init_raw_REG (rtx x, machine_mode mode, unsigned int regno)
{
  set_mode_and_regno (x, mode, regno);
  REG_ATTRS (x) = NULL;
  ORIGINAL_REGNO (x) = regno;
  return x;
}

/* Generate a new REG rtx.  Make sure ORIGINAL_REGNO is set properly, and
   don't attempt to share with the various global pieces of rtl (such as
   frame_pointer_rtx).  */

rtx
gen_raw_REG (machine_mode mode, unsigned int regno)
{
  rtx x = rtx_alloc (REG MEM_STAT_INFO);
  init_raw_REG (x, mode, regno);
  return x;
}

/* There are some RTL codes that require special attention; the generation
   functions do the raw handling.  If you add to this list, modify
   special_rtx in gengenrtl.cc as well.  */

rtx_expr_list *
gen_rtx_EXPR_LIST (machine_mode mode, rtx expr, rtx expr_list)
{
  return as_a <rtx_expr_list *> (gen_rtx_fmt_ee (EXPR_LIST, mode, expr,
						 expr_list));
}

rtx_insn_list *
gen_rtx_INSN_LIST (machine_mode mode, rtx insn, rtx insn_list)
{
  return as_a <rtx_insn_list *> (gen_rtx_fmt_ue (INSN_LIST, mode, insn,
						 insn_list));
}

rtx_insn *
gen_rtx_INSN (machine_mode mode, rtx_insn *prev_insn, rtx_insn *next_insn,
	      basic_block bb, rtx pattern, int location, int code,
	      rtx reg_notes)
{
  return as_a <rtx_insn *> (gen_rtx_fmt_uuBeiie (INSN, mode,
						 prev_insn, next_insn,
						 bb, pattern, location, code,
						 reg_notes));
}

rtx
gen_rtx_CONST_INT (machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
{
  if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
    return const_int_rtx[arg + MAX_SAVED_CONST_INT];

#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
  if (const_true_rtx && arg == STORE_FLAG_VALUE)
    return const_true_rtx;
#endif

  /* Look up the CONST_INT in the hash table.  */
  rtx *slot = const_int_htab->find_slot_with_hash (arg, (hashval_t) arg,
						   INSERT);
  if (*slot == 0)
    *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);

  return *slot;
}

rtx
gen_int_mode (poly_int64 c, machine_mode mode)
{
  c = trunc_int_for_mode (c, mode);
  if (c.is_constant ())
    return GEN_INT (c.coeffs[0]);
  unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode));
  return immed_wide_int_const (poly_wide_int::from (c, prec, SIGNED), mode);
}
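
/* For example (an illustrative sketch): gen_int_mode first truncates C
   to the precision of MODE, so

     gen_int_mode (0xff, QImode)   == constm1_rtx  (0xff sign-truncates to -1)
     gen_int_mode (0x100, QImode)  == const0_rtx

   whereas a bare GEN_INT (0xff) would produce a CONST_INT that is out of
   range for QImode.  */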

/* CONST_DOUBLEs might be created from pairs of integers, or from
   REAL_VALUE_TYPEs.  Also, their length is known only at run time,
   so we cannot use gen_rtx_raw_CONST_DOUBLE.  */

/* Determine whether REAL, a CONST_DOUBLE, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */
static rtx
lookup_const_double (rtx real)
{
  rtx *slot = const_double_htab->find_slot (real, INSERT);
  if (*slot == 0)
    *slot = real;

  return *slot;
}

/* Return a CONST_DOUBLE rtx for a floating-point value specified by
   VALUE in mode MODE.  */
rtx
const_double_from_real_value (REAL_VALUE_TYPE value, machine_mode mode)
{
  rtx real = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (real, mode);

  real->u.rv = value;

  return lookup_const_double (real);
}

/* Determine whether FIXED, a CONST_FIXED, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */

static rtx
lookup_const_fixed (rtx fixed)
{
  rtx *slot = const_fixed_htab->find_slot (fixed, INSERT);
  if (*slot == 0)
    *slot = fixed;

  return *slot;
}

/* Return a CONST_FIXED rtx for a fixed-point value specified by
   VALUE in mode MODE.  */

rtx
const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, machine_mode mode)
{
  rtx fixed = rtx_alloc (CONST_FIXED);
  PUT_MODE (fixed, mode);

  fixed->u.fv = value;

  return lookup_const_fixed (fixed);
}

#if TARGET_SUPPORTS_WIDE_INT == 0
/* Constructs double_int from rtx CST.  */

double_int
rtx_to_double_int (const_rtx cst)
{
  double_int r;

  if (CONST_INT_P (cst))
    r = double_int::from_shwi (INTVAL (cst));
  else if (CONST_DOUBLE_AS_INT_P (cst))
    {
      r.low = CONST_DOUBLE_LOW (cst);
      r.high = CONST_DOUBLE_HIGH (cst);
    }
  else
    gcc_unreachable ();

  return r;
}
#endif

#if TARGET_SUPPORTS_WIDE_INT
/* Determine whether CONST_WIDE_INT WINT already exists in the hash table.
   If so, return its counterpart; otherwise add it to the hash table and
   return it.  */

static rtx
lookup_const_wide_int (rtx wint)
{
  rtx *slot = const_wide_int_htab->find_slot (wint, INSERT);
  if (*slot == 0)
    *slot = wint;

  return *slot;
}
#endif

/* Return an rtx constant for V, given that the constant has mode MODE.
   The returned rtx will be a CONST_INT if V fits, otherwise it will be
   a CONST_DOUBLE (if !TARGET_SUPPORTS_WIDE_INT) or a CONST_WIDE_INT
   (if TARGET_SUPPORTS_WIDE_INT).  */

static rtx
immed_wide_int_const_1 (const wide_int_ref &v, machine_mode mode)
{
  unsigned int len = v.get_len ();
  /* Not scalar_int_mode because we also allow pointer bound modes.  */
  unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode));

  /* Allow truncation but not extension since we do not know if the
     number is signed or unsigned.  */
  gcc_assert (prec <= v.get_precision ());

  if (len < 2 || prec <= HOST_BITS_PER_WIDE_INT)
    return gen_int_mode (v.elt (0), mode);

#if TARGET_SUPPORTS_WIDE_INT
  {
    unsigned int i;
    rtx value;
    unsigned int blocks_needed
      = (prec + HOST_BITS_PER_WIDE_INT - 1) / HOST_BITS_PER_WIDE_INT;

    if (len > blocks_needed)
      len = blocks_needed;

    value = const_wide_int_alloc (len);

    /* It is so tempting to just put the mode in here.  Must control
       myself ... */
    PUT_MODE (value, VOIDmode);
    CWI_PUT_NUM_ELEM (value, len);

    for (i = 0; i < len; i++)
      CONST_WIDE_INT_ELT (value, i) = v.elt (i);

    return lookup_const_wide_int (value);
  }
#else
  return immed_double_const (v.elt (0), v.elt (1), mode);
#endif
}

#if TARGET_SUPPORTS_WIDE_INT == 0
/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
   of ints: I0 is the low-order word and I1 is the high-order word.
   For values that are larger than HOST_BITS_PER_DOUBLE_INT, the
   implied upper bits are copies of the high bit of i1.  The value
   itself is neither signed nor unsigned.  Do not use this routine for
   non-integer modes; convert to REAL_VALUE_TYPE and use
   const_double_from_real_value.  */

rtx
immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, machine_mode mode)
{
  rtx value;
  unsigned int i;

  /* There are the following cases (note that there are no modes with
     HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < HOST_BITS_PER_DOUBLE_INT):

     1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
	gen_int_mode.
     2) If the value of the integer fits into HOST_WIDE_INT anyway
	(i.e., i1 consists only from copies of the sign bit, and sign
	of i0 and i1 are the same), then we return a CONST_INT for i0.
     3) Otherwise, we create a CONST_DOUBLE for i0 and i1.  */
  scalar_mode smode;
  if (is_a <scalar_mode> (mode, &smode)
      && GET_MODE_BITSIZE (smode) <= HOST_BITS_PER_WIDE_INT)
    return gen_int_mode (i0, mode);

  /* If this integer fits in one word, return a CONST_INT.  */
  if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
    return GEN_INT (i0);

  /* We use VOIDmode for integers.  */
  value = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (value, VOIDmode);

  CONST_DOUBLE_LOW (value) = i0;
  CONST_DOUBLE_HIGH (value) = i1;

  for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
    XWINT (value, i) = 0;

  return lookup_const_double (value);
}
#endif

/* Return an rtx representation of C in mode MODE.  */

rtx
immed_wide_int_const (const poly_wide_int_ref &c, machine_mode mode)
{
  if (c.is_constant ())
    return immed_wide_int_const_1 (c.coeffs[0], mode);

  /* Not scalar_int_mode because we also allow pointer bound modes.  */
  unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode));

  /* Allow truncation but not extension since we do not know if the
     number is signed or unsigned.  */
  gcc_assert (prec <= c.coeffs[0].get_precision ());
  poly_wide_int newc = poly_wide_int::from (c, prec, SIGNED);

  /* See whether we already have an rtx for this constant.  */
  inchash::hash h;
  h.add_int (mode);
  for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
    h.add_wide_int (newc.coeffs[i]);
  const_poly_int_hasher::compare_type typed_value (mode, newc);
  rtx *slot = const_poly_int_htab->find_slot_with_hash (typed_value,
							h.end (), INSERT);
  rtx x = *slot;
  if (x)
    return x;

  /* Create a new rtx.  There's a choice to be made here between installing
     the actual mode of the rtx or leaving it as VOIDmode (for consistency
     with CONST_INT).  In practice the handling of the codes is different
     enough that we get no benefit from using VOIDmode, and various places
     assume that VOIDmode implies CONST_INT.  Using the real mode seems like
     the right long-term direction anyway.  */
  typedef trailing_wide_ints<NUM_POLY_INT_COEFFS> twi;
  size_t extra_size = twi::extra_size (prec);
  x = rtx_alloc_v (CONST_POLY_INT,
		   sizeof (struct const_poly_int_def) + extra_size);
  PUT_MODE (x, mode);
  CONST_POLY_INT_COEFFS (x).set_precision (prec);
  for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
    CONST_POLY_INT_COEFFS (x)[i] = newc.coeffs[i];

  *slot = x;
  return x;
}

rtx
gen_rtx_REG (machine_mode mode, unsigned int regno)
{
  /* In case the MD file explicitly references the frame pointer, have
     all such references point to the same frame pointer.  This is
     used during frame pointer elimination to distinguish the explicit
     references to these registers from pseudos that happened to be
     assigned to them.

     If we have eliminated the frame pointer or arg pointer, we will
     be using it as a normal register, for example as a spill
     register.  In such cases, we might be accessing it in a mode that
     is not Pmode and therefore cannot use the pre-allocated rtx.

     Also don't do this when we are making new REGs in reload, since
     we don't want to get confused with the real pointers.  */

  if (mode == Pmode && !reload_in_progress && !lra_in_progress)
    {
      if (regno == FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return frame_pointer_rtx;

      if (!HARD_FRAME_POINTER_IS_FRAME_POINTER
	  && regno == HARD_FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return hard_frame_pointer_rtx;
#if !HARD_FRAME_POINTER_IS_ARG_POINTER
      if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
	  && regno == ARG_POINTER_REGNUM)
	return arg_pointer_rtx;
#endif
#ifdef RETURN_ADDRESS_POINTER_REGNUM
      if (regno == RETURN_ADDRESS_POINTER_REGNUM)
	return return_address_pointer_rtx;
#endif
      if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
	  && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	return pic_offset_table_rtx;
      if (regno == STACK_POINTER_REGNUM)
	return stack_pointer_rtx;
    }

#if 0
  /* If the per-function register table has been set up, try to re-use
     an existing entry in that table to avoid useless generation of RTL.

     This code is disabled for now until we can fix the various backends
     which depend on having non-shared hard registers in some cases.  Long
     term we want to re-enable this code as it can significantly cut down
     on the amount of useless RTL that gets generated.

     We'll also need to fix some code that runs after reload that wants to
     set ORIGINAL_REGNO.  */

  if (cfun
      && cfun->emit
      && regno_reg_rtx
      && regno < FIRST_PSEUDO_REGISTER
      && reg_raw_mode[regno] == mode)
    return regno_reg_rtx[regno];
#endif

  return gen_raw_REG (mode, regno);
}
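
/* So, for example (a sketch under the usual Pmode conditions):

     gen_rtx_REG (Pmode, STACK_POINTER_REGNUM) == stack_pointer_rtx

   holds, and passes that compare these registers by pointer identity can
   rely on it, whereas gen_raw_REG would have produced a fresh, unshared
   rtx.  */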

rtx
gen_rtx_MEM (machine_mode mode, rtx addr)
{
  rtx rt = gen_rtx_raw_MEM (mode, addr);

  /* This field is not cleared by the mere allocation of the rtx, so
     we clear it here.  */
  MEM_ATTRS (rt) = 0;

  return rt;
}

/* Generate a memory referring to non-trapping constant memory.  */

rtx
gen_const_mem (machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_READONLY_P (mem) = 1;
  MEM_NOTRAP_P (mem) = 1;
  return mem;
}
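
/* Illustrative use (a sketch; the "*.LC0" label is a made-up example of a
   constant-pool name):

     rtx sym = gen_rtx_SYMBOL_REF (Pmode, "*.LC0");
     rtx mem = gen_const_mem (SImode, sym);

   RTL passes may then freely CSE or hoist MEM, since it can neither trap
   nor be modified.  */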

/* Generate a MEM referring to fixed portions of the frame, e.g., register
   save areas.  */

rtx
gen_frame_mem (machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}

/* Generate a MEM referring to a temporary use of the stack, not part
   of the fixed stack frame.  For example, something which is pushed
   by a target splitter.  */

rtx
gen_tmp_stack_mem (machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  if (!cfun->calls_alloca)
    set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}

/* We want to create (subreg:OMODE (obj:IMODE) OFFSET).  Return true if
   this construct would be valid, and false otherwise.  */

bool
validate_subreg (machine_mode omode, machine_mode imode,
		 const_rtx reg, poly_uint64 offset)
{
  poly_uint64 isize = GET_MODE_SIZE (imode);
  poly_uint64 osize = GET_MODE_SIZE (omode);

  /* The sizes must be ordered, so that we know whether the subreg
     is partial, paradoxical or complete.  */
  if (!ordered_p (isize, osize))
    return false;

  /* All subregs must be aligned.  */
  if (!multiple_p (offset, osize))
    return false;

  /* The subreg offset cannot be outside the inner object.  */
  if (maybe_ge (offset, isize))
    return false;

  poly_uint64 regsize = REGMODE_NATURAL_SIZE (imode);

  /* ??? This should not be here.  Temporarily continue to allow word_mode
     subregs of anything.  The most common offender is (subreg:SI (reg:DF)).
     Generally, backends are doing something sketchy but it'll take time to
     fix them all.  */
  if (omode == word_mode)
    ;
  /* ??? Similarly, e.g. with (subreg:DF (reg:TI)).  Though store_bit_field
     is the culprit here, and not the backends.  */
  else if (known_ge (osize, regsize) && known_ge (isize, osize))
    ;
  /* Allow component subregs of complex and vector.  Though given the below
     extraction rules, it's not always clear what that means.  */
  else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	   && GET_MODE_INNER (imode) == omode)
    ;
  /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
     i.e. (subreg:V4SF (reg:SF) 0) or (subreg:V4SF (reg:V2SF) 0).  This
     surely isn't the cleanest way to represent this.  It's questionable
     if this ought to be represented at all -- why can't this all be hidden
     in post-reload splitters that make arbitrary mode changes to the
     registers themselves.  */
  else if (VECTOR_MODE_P (omode)
	   && GET_MODE_UNIT_SIZE (omode) == GET_MODE_UNIT_SIZE (imode))
    ;
  /* Subregs involving floating point modes are not allowed to
     change size unless it's an insert into a complex mode.
     Therefore (subreg:DI (reg:DF) 0) and (subreg:CS (reg:SF) 0) are fine, but
     (subreg:SI (reg:DF) 0) isn't.  */
  else if ((FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
	   && !COMPLEX_MODE_P (omode))
    {
      if (! (known_eq (isize, osize)
	     /* LRA can use subreg to store a floating point value in
		an integer mode.  Although the floating point and the
		integer modes need the same number of hard registers,
		the size of floating point mode can be less than the
		integer mode.  LRA also uses subregs for a register
		that should be used in a different mode in one insn.  */
	     || lra_in_progress))
	return false;
    }

  /* Paradoxical subregs must have offset zero.  */
  if (maybe_gt (osize, isize))
    return known_eq (offset, 0U);

  /* This is a normal subreg.  Verify that the offset is representable.  */

  /* For hard registers, we already have most of these rules collected in
     subreg_offset_representable_p.  */
  if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
    {
      unsigned int regno = REGNO (reg);

      if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	  && GET_MODE_INNER (imode) == omode)
	;
      else if (!REG_CAN_CHANGE_MODE_P (regno, imode, omode))
	return false;

      return subreg_offset_representable_p (regno, imode, offset, omode);
    }
  /* Do not allow SUBREG with stricter alignment than the inner MEM.  */
  else if (reg && MEM_P (reg) && STRICT_ALIGNMENT
	   && MEM_ALIGN (reg) < GET_MODE_ALIGNMENT (omode))
    return false;

  /* The outer size must be ordered wrt the register size, otherwise
     we wouldn't know at compile time how many registers the outer
     mode occupies.  */
  if (!ordered_p (osize, regsize))
    return false;

  /* For pseudo registers, we want most of the same checks.  Namely:

     Assume that the pseudo register will be allocated to hard registers
     that can hold REGSIZE bytes each.  If OSIZE is not a multiple of REGSIZE,
     the remainder must correspond to the lowpart of the containing hard
     register.  If BYTES_BIG_ENDIAN, the lowpart is at the highest offset,
     otherwise it is at the lowest offset.

     Given that we've already checked the mode and offset alignment,
     we only have to check subblock subregs here.  */
  if (maybe_lt (osize, regsize)
      && ! (lra_in_progress && (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))))
    {
      /* It is invalid for the target to pick a register size for a mode
	 that isn't ordered wrt to the size of that mode.  */
      poly_uint64 block_size = ordered_min (isize, regsize);
      unsigned int start_reg;
      poly_uint64 offset_within_reg;
      if (!can_div_trunc_p (offset, block_size, &start_reg, &offset_within_reg)
	  || (BYTES_BIG_ENDIAN
	      ? maybe_ne (offset_within_reg, block_size - osize)
	      : maybe_ne (offset_within_reg, 0U)))
	return false;
    }
  return true;
}
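
/* A few concrete instances of these rules (illustrative, for a 32-bit
   little-endian target with 4-byte words):

     (subreg:SI (reg:DI) 0)   -- valid, aligned lowpart
     (subreg:SI (reg:DI) 4)   -- valid, aligned highpart
     (subreg:SI (reg:DI) 2)   -- invalid, offset not a multiple of 4
     (subreg:SI (reg:DF) 0)   -- invalid in general: float/int size change

   The last case is nevertheless let through when SImode is word_mode,
   by the first ??? hack above.  */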

rtx
gen_rtx_SUBREG (machine_mode mode, rtx reg, poly_uint64 offset)
{
  gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
  return gen_rtx_raw_SUBREG (mode, reg, offset);
}

/* Generate a SUBREG representing the least-significant part of REG if MODE
   is smaller than the mode of REG, otherwise a paradoxical SUBREG.  */

rtx
gen_lowpart_SUBREG (machine_mode mode, rtx reg)
{
  machine_mode inmode;

  inmode = GET_MODE (reg);
  if (inmode == VOIDmode)
    inmode = mode;
  return gen_rtx_SUBREG (mode, reg,
			 subreg_lowpart_offset (mode, inmode));
}

rtx
gen_rtx_VAR_LOCATION (machine_mode mode, tree decl, rtx loc,
		      enum var_init_status status)
{
  rtx x = gen_rtx_fmt_te (VAR_LOCATION, mode, decl, loc);
  PAT_VAR_LOCATION_STATUS (x) = status;
  return x;
}

/* Create an rtvec and store within it the RTXen passed in the arguments.  */

rtvec
gen_rtvec (int n, ...)
{
  int i;
  rtvec rt_val;
  va_list p;

  va_start (p, n);

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    {
      va_end (p);
      return NULL_RTVEC;
    }

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = va_arg (p, rtx);

  va_end (p);
  return rt_val;
}
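
/* Typical use (a sketch): building the body of a PARALLEL, e.g.

     rtx par = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set1, set2));

   where set1 and set2 are previously constructed SET rtxes.  */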

rtvec
gen_rtvec_v (int n, rtx *argp)
{
  int i;
  rtvec rt_val;

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    return NULL_RTVEC;

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}

rtvec
gen_rtvec_v (int n, rtx_insn **argp)
{
  int i;
  rtvec rt_val;

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    return NULL_RTVEC;

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}

/* Return the number of bytes between the start of an OUTER_MODE
   in-memory value and the start of an INNER_MODE in-memory value,
   given that the former is a lowpart of the latter.  It may be a
   paradoxical lowpart, in which case the offset will be negative
   on big-endian targets.  */

poly_int64
byte_lowpart_offset (machine_mode outer_mode,
		     machine_mode inner_mode)
{
  if (paradoxical_subreg_p (outer_mode, inner_mode))
    return -subreg_lowpart_offset (inner_mode, outer_mode);
  else
    return subreg_lowpart_offset (outer_mode, inner_mode);
}
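
/* For example (illustrative): taking the SImode lowpart of a DImode
   value gives byte_lowpart_offset (SImode, DImode) == 0 on little-endian
   and 4 on big-endian targets; the paradoxical DImode lowpart of an
   SImode value gives byte_lowpart_offset (DImode, SImode) == 0 on
   little-endian and -4 on big-endian.  */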

/* Return the offset of (subreg:OUTER_MODE (mem:INNER_MODE X) OFFSET)
   from address X.  For paradoxical big-endian subregs this is a
   negative value, otherwise it's the same as OFFSET.  */

poly_int64
subreg_memory_offset (machine_mode outer_mode, machine_mode inner_mode,
		      poly_uint64 offset)
{
  if (paradoxical_subreg_p (outer_mode, inner_mode))
    {
      gcc_assert (known_eq (offset, 0U));
      return -subreg_lowpart_offset (inner_mode, outer_mode);
    }
  return offset;
}

/* As above, but return the offset that existing subreg X would have
   if SUBREG_REG (X) were stored in memory.  The only significant thing
   about the current SUBREG_REG is its mode.  */

poly_int64
subreg_memory_offset (const_rtx x)
{
  return subreg_memory_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)),
			       SUBREG_BYTE (x));
}

/* Generate a REG rtx for a new pseudo register of mode MODE.
   This pseudo is assigned the next sequential register number.  */

rtx
gen_reg_rtx (machine_mode mode)
{
  rtx val;
  unsigned int align = GET_MODE_ALIGNMENT (mode);

  gcc_assert (can_create_pseudo_p ());

  /* If a virtual register with bigger mode alignment is generated,
     increase stack alignment estimation because it might be spilled
     to stack later.  */
  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align
      && !crtl->stack_realign_processed)
    {
      unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
      if (crtl->stack_alignment_estimated < min_align)
	crtl->stack_alignment_estimated = min_align;
    }

  if (generating_concat_p
      && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
    {
      /* For complex modes, don't make a single pseudo.
	 Instead, make a CONCAT of two pseudos.
	 This allows noncontiguous allocation of the real and imaginary parts,
	 which makes much better code.  Besides, allocating DCmode
	 pseudos overstrains reload on some machines like the 386.  */
      rtx realpart, imagpart;
      machine_mode partmode = GET_MODE_INNER (mode);

      realpart = gen_reg_rtx (partmode);
      imagpart = gen_reg_rtx (partmode);
      return gen_rtx_CONCAT (mode, realpart, imagpart);
    }

  /* Do not call gen_reg_rtx with uninitialized crtl.  */
  gcc_assert (crtl->emit.regno_pointer_align_length);

  crtl->emit.ensure_regno_capacity ();
  gcc_assert (reg_rtx_no < crtl->emit.regno_pointer_align_length);

  val = gen_raw_REG (mode, reg_rtx_no);
  regno_reg_rtx[reg_rtx_no++] = val;
  return val;
}
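
/* For instance (a sketch): with generating_concat_p set, requesting a
   DCmode pseudo yields

     (concat:DC (reg:DF N) (reg:DF N+1))

   rather than one DCmode register, so the real and imaginary parts can
   be allocated independently.  */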

/* Make sure m_regno_pointer_align, and regno_reg_rtx are large
   enough to have elements in the range 0 <= idx <= reg_rtx_no.  */

void
emit_status::ensure_regno_capacity ()
{
  int old_size = regno_pointer_align_length;

  if (reg_rtx_no < old_size)
    return;

  int new_size = old_size * 2;
  while (reg_rtx_no >= new_size)
    new_size *= 2;

  char *tmp = XRESIZEVEC (char, regno_pointer_align, new_size);
  memset (tmp + old_size, 0, new_size - old_size);
  regno_pointer_align = (unsigned char *) tmp;

  rtx *new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, new_size);
  memset (new1 + old_size, 0, (new_size - old_size) * sizeof (rtx));
  regno_reg_rtx = new1;

  crtl->emit.regno_pointer_align_length = new_size;
}

/* Return TRUE if REG is a PARM_DECL, FALSE otherwise.  */

bool
reg_is_parm_p (rtx reg)
{
  tree decl;

  gcc_assert (REG_P (reg));
  decl = REG_EXPR (reg);
  return (decl && TREE_CODE (decl) == PARM_DECL);
}

/* Update NEW_RTX with the same attributes as REG, but with OFFSET added
   to the REG_OFFSET.  */

static void
update_reg_offset (rtx new_rtx, rtx reg, poly_int64 offset)
{
  REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
				       REG_OFFSET (reg) + offset);
}

/* Generate a register with same attributes as REG, but with OFFSET
   added to the REG_OFFSET.  */

rtx
gen_rtx_REG_offset (rtx reg, machine_mode mode, unsigned int regno,
		    poly_int64 offset)
{
  /* Use gen_raw_REG rather than gen_rtx_REG, because otherwise we'd
     overwrite REG_ATTRS (and in the callers often ORIGINAL_REGNO too)
     of the shared REG rtxes like stack_pointer_rtx etc.  This should
     happen only for SUBREGs from DEBUG_INSNs, RA should ensure
     multi-word registers don't overlap the special registers like
     stack pointer.  */
  rtx new_rtx = gen_raw_REG (mode, regno);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Generate a new pseudo-register with the same attributes as REG, but
   with OFFSET added to the REG_OFFSET.  */

rtx
gen_reg_rtx_offset (rtx reg, machine_mode mode, int offset)
{
  rtx new_rtx = gen_reg_rtx (mode);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Adjust REG in-place so that it has mode MODE.  It is assumed that the
   new register is a (possibly paradoxical) lowpart of the old one.  */

void
adjust_reg_mode (rtx reg, machine_mode mode)
{
  update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
  PUT_MODE (reg, mode);
}

/* Copy REG's attributes from X, if X has any attributes.  If REG and X
   have different modes, REG is a (possibly paradoxical) lowpart of X.  */

void
set_reg_attrs_from_value (rtx reg, rtx x)
{
  poly_int64 offset;
  bool can_be_reg_pointer = true;

  /* Don't call mark_reg_pointer for incompatible pointer sign
     extension.  */
  while (GET_CODE (x) == SIGN_EXTEND
	 || GET_CODE (x) == ZERO_EXTEND
	 || GET_CODE (x) == TRUNCATE
	 || (GET_CODE (x) == SUBREG && subreg_lowpart_p (x)))
    {
#if defined(POINTERS_EXTEND_UNSIGNED)
      if (((GET_CODE (x) == SIGN_EXTEND && POINTERS_EXTEND_UNSIGNED)
	   || (GET_CODE (x) == ZERO_EXTEND && ! POINTERS_EXTEND_UNSIGNED)
	   || (paradoxical_subreg_p (x)
	       && ! (SUBREG_PROMOTED_VAR_P (x)
		     && SUBREG_CHECK_PROMOTED_SIGN (x,
						    POINTERS_EXTEND_UNSIGNED))))
	  && !targetm.have_ptr_extend ())
	can_be_reg_pointer = false;
#endif
      x = XEXP (x, 0);
    }

  /* Hard registers can be reused for multiple purposes within the same
     function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
     on them is wrong.  */
  if (HARD_REGISTER_P (reg))
    return;

  offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
  if (MEM_P (x))
    {
      if (MEM_OFFSET_KNOWN_P (x))
	REG_ATTRS (reg) = get_reg_attrs (MEM_EXPR (x),
					 MEM_OFFSET (x) + offset);
      if (can_be_reg_pointer && MEM_POINTER (x))
	mark_reg_pointer (reg, 0);
    }
  else if (REG_P (x))
    {
      if (REG_ATTRS (x))
	update_reg_offset (reg, x, offset);
      if (can_be_reg_pointer && REG_POINTER (x))
	mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
    }
}

/* Generate a REG rtx for a new pseudo register, copying the mode
   and attributes from X.  */

rtx
gen_reg_rtx_and_attrs (rtx x)
{
  rtx reg = gen_reg_rtx (GET_MODE (x));
  set_reg_attrs_from_value (reg, x);
  return reg;
}

/* Set the register attributes for registers contained in PARM_RTX.
   Use needed values from memory attributes of MEM.  */

void
set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
{
  if (REG_P (parm_rtx))
    set_reg_attrs_from_value (parm_rtx, mem);
  else if (GET_CODE (parm_rtx) == PARALLEL)
    {
      /* Check for a NULL entry in the first slot, used to indicate that the
	 parameter goes both on the stack and in registers.  */
      int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
      for (; i < XVECLEN (parm_rtx, 0); i++)
	{
	  rtx x = XVECEXP (parm_rtx, 0, i);
	  if (REG_P (XEXP (x, 0)))
	    REG_ATTRS (XEXP (x, 0))
	      = get_reg_attrs (MEM_EXPR (mem),
			       INTVAL (XEXP (x, 1)));
	}
    }
}

/* Set the REG_ATTRS for registers in value X, given that X represents
   decl T.  */

void
set_reg_attrs_for_decl_rtl (tree t, rtx x)
{
  if (!t)
    return;
  tree tdecl = t;
  if (GET_CODE (x) == SUBREG)
    {
      gcc_assert (subreg_lowpart_p (x));
      x = SUBREG_REG (x);
    }
  if (REG_P (x))
    REG_ATTRS (x)
      = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
					       DECL_P (tdecl)
					       ? DECL_MODE (tdecl)
					       : TYPE_MODE (TREE_TYPE (tdecl))));
  if (GET_CODE (x) == CONCAT)
    {
      if (REG_P (XEXP (x, 0)))
	REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
      if (REG_P (XEXP (x, 1)))
	REG_ATTRS (XEXP (x, 1))
	  = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int i, start;

      /* Check for a NULL entry, used to indicate that the parameter goes
	 both on the stack and in registers.  */
      if (XEXP (XVECEXP (x, 0, 0), 0))
	start = 0;
      else
	start = 1;

      for (i = start; i < XVECLEN (x, 0); i++)
	{
	  rtx y = XVECEXP (x, 0, i);
	  if (REG_P (XEXP (y, 0)))
	    REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
	}
    }
}

/* Assign the RTX X to declaration T.  */

void
set_decl_rtl (tree t, rtx x)
{
  DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
  if (x)
    set_reg_attrs_for_decl_rtl (t, x);
}

/* Assign the RTX X to parameter declaration T.  BY_REFERENCE_P is true
   if the ABI requires the parameter to be passed by reference.  */

void
set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
{
  DECL_INCOMING_RTL (t) = x;
  if (x && !by_reference_p)
    set_reg_attrs_for_decl_rtl (t, x);
}

/* Identify REG (which may be a CONCAT) as a user register.  */

void
mark_user_reg (rtx reg)
{
  if (GET_CODE (reg) == CONCAT)
    {
      REG_USERVAR_P (XEXP (reg, 0)) = 1;
      REG_USERVAR_P (XEXP (reg, 1)) = 1;
    }
  else
    {
      gcc_assert (REG_P (reg));
      REG_USERVAR_P (reg) = 1;
    }
}

/* Identify REG as a probable pointer register and show its alignment
   as ALIGN, if nonzero.  */

void
mark_reg_pointer (rtx reg, int align)
{
  if (! REG_POINTER (reg))
    {
      REG_POINTER (reg) = 1;

      if (align)
	REGNO_POINTER_ALIGN (REGNO (reg)) = align;
    }
  else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
    /* We can no longer be sure just how aligned this pointer is.  */
    REGNO_POINTER_ALIGN (REGNO (reg)) = align;
}

/* Return 1 plus largest pseudo reg number used in the current function.  */

int
max_reg_num (void)
{
  return reg_rtx_no;
}

/* Return 1 + the largest label number used so far in the current function.  */

int
max_label_num (void)
{
  return label_num;
}

/* Return first label number used in this function (if any were used).  */

int
get_first_label_num (void)
{
  return first_label_num;
}

/* If the rtx for label was created during the expansion of a nested
   function, then first_label_num won't include this label number.
   Fix this now so that array indices work later.  */

void
maybe_set_first_label_num (rtx_code_label *x)
{
  if (CODE_LABEL_NUMBER (x) < first_label_num)
    first_label_num = CODE_LABEL_NUMBER (x);
}

/* For use by the RTL function loader, when mingling with normal
   functions.
   Ensure that label_num is greater than the label num of X, to avoid
   duplicate labels in the generated assembler.  */

void
maybe_set_max_label_num (rtx_code_label *x)
{
  if (CODE_LABEL_NUMBER (x) >= label_num)
    label_num = CODE_LABEL_NUMBER (x) + 1;
}

/* Return a value representing some low-order bits of X, where the number
   of low-order bits is given by MODE.  Note that no conversion is done
   between floating-point and fixed-point values, rather, the bit
   representation is returned.

   This function handles the cases in common between gen_lowpart, below,
   and two variants in cse.cc and combine.cc.  These are the cases that can
   be safely handled at all points in the compilation.

   If this is not a case we can handle, return 0.  */

rtx
gen_lowpart_common (machine_mode mode, rtx x)
{
  poly_uint64 msize = GET_MODE_SIZE (mode);
  machine_mode innermode;

  /* Unfortunately, this routine doesn't take a parameter for the mode of X,
     so we have to make one up.  Yuk.  */
  innermode = GET_MODE (x);
  if (CONST_INT_P (x)
      && known_le (msize * BITS_PER_UNIT,
		   (unsigned HOST_WIDE_INT) HOST_BITS_PER_WIDE_INT))
    innermode = int_mode_for_size (HOST_BITS_PER_WIDE_INT, 0).require ();
  else if (innermode == VOIDmode)
    innermode = int_mode_for_size (HOST_BITS_PER_DOUBLE_INT, 0).require ();

  gcc_assert (innermode != VOIDmode && innermode != BLKmode);

  if (innermode == mode)
    return x;

  /* The size of the outer and inner modes must be ordered.  */
  poly_uint64 xsize = GET_MODE_SIZE (innermode);
  if (!ordered_p (msize, xsize))
    return 0;

  if (SCALAR_FLOAT_MODE_P (mode))
    {
      /* Don't allow paradoxical FLOAT_MODE subregs.  */
      if (maybe_gt (msize, xsize))
	return 0;
    }
  else
    {
      /* MODE must occupy no more of the underlying registers than X.  */
      poly_uint64 regsize = REGMODE_NATURAL_SIZE (innermode);
      unsigned int mregs, xregs;
      if (!can_div_away_from_zero_p (msize, regsize, &mregs)
	  || !can_div_away_from_zero_p (xsize, regsize, &xregs)
	  || mregs > xregs)
	return 0;
    }

  scalar_int_mode int_mode, int_innermode, from_mode;
  if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
      && is_a <scalar_int_mode> (mode, &int_mode)
      && is_a <scalar_int_mode> (innermode, &int_innermode)
      && is_a <scalar_int_mode> (GET_MODE (XEXP (x, 0)), &from_mode))
    {
      /* If we are getting the low-order part of something that has been
	 sign- or zero-extended, we can either just use the object being
	 extended or make a narrower extension.  If we want an even smaller
	 piece than the size of the object being extended, call ourselves
	 recursively.

	 This case is used mostly by combine and cse.  */

      if (from_mode == int_mode)
	return XEXP (x, 0);
      else if (GET_MODE_SIZE (int_mode) < GET_MODE_SIZE (from_mode))
	return gen_lowpart_common (int_mode, XEXP (x, 0));
      else if (GET_MODE_SIZE (int_mode) < GET_MODE_SIZE (int_innermode))
	return gen_rtx_fmt_e (GET_CODE (x), int_mode, XEXP (x, 0));
    }
  else if (GET_CODE (x) == SUBREG || REG_P (x)
	   || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
	   || CONST_DOUBLE_AS_FLOAT_P (x) || CONST_SCALAR_INT_P (x)
	   || CONST_POLY_INT_P (x))
    return lowpart_subreg (mode, x, innermode);

  /* Otherwise, we can't do this.  */
  return 0;
}
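
/* Two illustrative cases of the extension handling above (a sketch):

     gen_lowpart_common (SImode, (sign_extend:DI (reg:SI R)))
       => (reg:SI R)                      -- just use the unextended object
     gen_lowpart_common (HImode, (sign_extend:DI (reg:SI R)))
       => gen_lowpart_common (HImode, (reg:SI R))  -- recurse on the smaller
						      piece
 */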

rtx
gen_highpart (machine_mode mode, rtx x)
{
  poly_uint64 msize = GET_MODE_SIZE (mode);
  rtx result;

  /* This case loses if X is a subreg.  To catch bugs early,
     complain if an invalid MODE is used even in other cases.  */
  gcc_assert (known_le (msize, (unsigned int) UNITS_PER_WORD)
	      || known_eq (msize, GET_MODE_UNIT_SIZE (GET_MODE (x))));

  /* gen_lowpart_common handles a lot of special cases due to needing to handle
     paradoxical subregs; it only calls simplify_gen_subreg when certain that
     it will produce something meaningful.  The only case we need to handle
     specially here is MEM.  */
  if (MEM_P (x))
    {
      poly_int64 offset = subreg_highpart_offset (mode, GET_MODE (x));
      return adjust_address (x, mode, offset);
    }

  result = simplify_gen_subreg (mode, x, GET_MODE (x),
				subreg_highpart_offset (mode, GET_MODE (x)));
  /* Since we handle MEM directly above, we should never get a MEM back
     from simplify_gen_subreg.  */
  gcc_assert (result && !MEM_P (result));

  return result;
}

/* Like gen_highpart, but accept mode of EXP operand in case EXP can
   be VOIDmode constant.  */
rtx
gen_highpart_mode (machine_mode outermode, machine_mode innermode, rtx exp)
{
  if (GET_MODE (exp) != VOIDmode)
    {
      gcc_assert (GET_MODE (exp) == innermode);
      return gen_highpart (outermode, exp);
    }
  return simplify_gen_subreg (outermode, exp, innermode,
			      subreg_highpart_offset (outermode, innermode));
}

/* Return the SUBREG_BYTE for a lowpart subreg whose outer mode has
   OUTER_BYTES bytes and whose inner mode has INNER_BYTES bytes.  */

poly_uint64
subreg_size_lowpart_offset (poly_uint64 outer_bytes, poly_uint64 inner_bytes)
{
  gcc_checking_assert (ordered_p (outer_bytes, inner_bytes));
  if (maybe_gt (outer_bytes, inner_bytes))
    /* Paradoxical subregs always have a SUBREG_BYTE of 0.  */
    return 0;

  if (BYTES_BIG_ENDIAN && WORDS_BIG_ENDIAN)
    return inner_bytes - outer_bytes;
  else if (!BYTES_BIG_ENDIAN && !WORDS_BIG_ENDIAN)
    return 0;
  else
    return subreg_size_offset_from_lsb (outer_bytes, inner_bytes, 0);
}
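
/* Concretely (illustrative): for a lowpart SImode subreg of a DImode
   value, subreg_size_lowpart_offset (4, 8) is 0 on little-endian and 4
   on big-endian targets, matching where the least significant four
   bytes live in memory.  */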

/* Return the SUBREG_BYTE for a highpart subreg whose outer mode has
   OUTER_BYTES bytes and whose inner mode has INNER_BYTES bytes.  */

poly_uint64
subreg_size_highpart_offset (poly_uint64 outer_bytes, poly_uint64 inner_bytes)
{
  gcc_assert (known_ge (inner_bytes, outer_bytes));

  if (BYTES_BIG_ENDIAN && WORDS_BIG_ENDIAN)
    return 0;
  else if (!BYTES_BIG_ENDIAN && !WORDS_BIG_ENDIAN)
    return inner_bytes - outer_bytes;
  else
    return subreg_size_offset_from_lsb (outer_bytes, inner_bytes,
					(inner_bytes - outer_bytes)
					* BITS_PER_UNIT);
}

/* Return true iff X, assumed to be a SUBREG,
   refers to the least significant part of its containing reg.
   If X is not a SUBREG, always return true (it is its own low part!).  */

bool
subreg_lowpart_p (const_rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return true;
  else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
    return false;

  return known_eq (subreg_lowpart_offset (GET_MODE (x),
					  GET_MODE (SUBREG_REG (x))),
		   SUBREG_BYTE (x));
}

/* Return subword OFFSET of operand OP.
   The word number, OFFSET, is interpreted as the word number starting
   at the low-order address.  OFFSET 0 is the low-order word if not
   WORDS_BIG_ENDIAN, otherwise it is the high-order word.

   If we cannot extract the required word, we return zero.  Otherwise,
   an rtx corresponding to the requested word will be returned.

   VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
   reload has completed, a valid address will always be returned.  After
   reload, if a valid address cannot be returned, we return zero.

   If VALIDATE_ADDRESS is zero, we simply form the required address; validating
   it is the responsibility of the caller.

   MODE is the mode of OP in case it is a CONST_INT.

   ??? This is still rather broken for some cases.  The problem for the
   moment is that all callers of this thing provide no 'goal mode' to
   tell us to work with.  This exists because all callers were written
   in a word based SUBREG world.
   Now use of this function can be deprecated by simplify_subreg in most
   cases.  */

rtx
operand_subword (rtx op, poly_uint64 offset, int validate_address,
		 machine_mode mode)
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  gcc_assert (mode != VOIDmode);

  /* If OP is narrower than a word, fail.  */
  if (mode != BLKmode
      && maybe_lt (GET_MODE_SIZE (mode), UNITS_PER_WORD))
    return 0;

  /* If we want a word outside OP, return zero.  */
  if (mode != BLKmode
      && maybe_gt ((offset + 1) * UNITS_PER_WORD, GET_MODE_SIZE (mode)))
    return const0_rtx;

  /* Form a new MEM at the requested address.  */
  if (MEM_P (op))
    {
      rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);

      if (! validate_address)
	return new_rtx;

      else if (reload_completed)
	{
	  if (! strict_memory_address_addr_space_p (word_mode,
						    XEXP (new_rtx, 0),
						    MEM_ADDR_SPACE (op)))
	    return 0;
	}
      else
	return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
    }

  /* Rest can be handled by simplify_subreg.  */
  return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
}
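
/* Example (a sketch, assuming 32-bit words and !WORDS_BIG_ENDIAN): for a
   DImode operand OP,

     operand_subword (op, 0, 1, DImode)   -- the low-order word
     operand_subword (op, 1, 1, DImode)   -- the high-order word

   and an out-of-range OFFSET yields const0_rtx, as coded above.  */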

/* Similar to `operand_subword', but never return 0.  If we can't
   extract the required subword, put OP into a register and try again.
   The second attempt must succeed.  We always validate the address in
   this case.

   MODE is the mode of OP, in case it is CONST_INT.  */

rtx
operand_subword_force (rtx op, poly_uint64 offset, machine_mode mode)
{
  rtx result = operand_subword (op, offset, 1, mode);

  if (result)
    return result;

  if (mode != BLKmode && mode != VOIDmode)
    {
      /* If this is a register which cannot be accessed by words, copy it
	 to a pseudo register.  */
      if (REG_P (op))
	op = copy_to_reg (op);
      else
	op = force_reg (mode, op);
    }

  result = operand_subword (op, offset, 1, mode);
  gcc_assert (result);

  return result;
}

mem_attrs::mem_attrs ()
  : expr (NULL_TREE),
    offset (0),
    size (0),
    alias (0),
    align (0),
    addrspace (ADDR_SPACE_GENERIC),
    offset_known_p (false),
    size_known_p (false)
{}

/* Returns true if the two MEM_EXPRs can be considered equal,
   false otherwise.  */

bool
mem_expr_equal_p (const_tree expr1, const_tree expr2)
{
  if (expr1 == expr2)
    return true;

  if (! expr1 || ! expr2)
    return false;

  if (TREE_CODE (expr1) != TREE_CODE (expr2))
    return false;

  return operand_equal_p (expr1, expr2, 0);
}
1857 /* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
1858 bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
1859 -1 if not known. */
1862 get_mem_align_offset (rtx mem, unsigned int align)
1864 tree expr;
1865 poly_uint64 offset;
1867 /* This function can't use
1868 if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem)
1869 || (MAX (MEM_ALIGN (mem),
1870 MAX (align, get_object_alignment (MEM_EXPR (mem))))
1871 < align))
1872 return -1;
1873 else
1874 return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1);
1875 for two reasons:
1876 - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
1877 for <variable>. get_inner_reference doesn't handle it and
1878 even if it did, the alignment in that case needs to be determined
1879 from DECL_FIELD_CONTEXT's TYPE_ALIGN.
1880 - it would do suboptimal job for COMPONENT_REFs, even if MEM_EXPR
1881 isn't sufficiently aligned, the object it is in might be. */
1882 gcc_assert (MEM_P (mem));
1883 expr = MEM_EXPR (mem);
1884 if (expr == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
1885 return -1;
1887 offset = MEM_OFFSET (mem);
1888 if (DECL_P (expr))
1890 if (DECL_ALIGN (expr) < align)
1891 return -1;
1893 else if (INDIRECT_REF_P (expr))
1895 if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
1896 return -1;
1898 else if (TREE_CODE (expr) == COMPONENT_REF)
1900 while (1)
1902 tree inner = TREE_OPERAND (expr, 0);
1903 tree field = TREE_OPERAND (expr, 1);
1904 tree byte_offset = component_ref_field_offset (expr);
1905 tree bit_offset = DECL_FIELD_BIT_OFFSET (field);
1907 poly_uint64 suboffset;
1908 if (!byte_offset
1909 || !poly_int_tree_p (byte_offset, &suboffset)
1910 || !tree_fits_uhwi_p (bit_offset))
1911 return -1;
1913 offset += suboffset;
1914 offset += tree_to_uhwi (bit_offset) / BITS_PER_UNIT;
1916 if (inner == NULL_TREE)
1918 if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
1919 < (unsigned int) align)
1920 return -1;
1921 break;
1923 else if (DECL_P (inner))
1925 if (DECL_ALIGN (inner) < align)
1926 return -1;
1927 break;
1929 else if (TREE_CODE (inner) != COMPONENT_REF)
1930 return -1;
1931 expr = inner;
1934 else
1935 return -1;
1937 HOST_WIDE_INT misalign;
1938 if (!known_misalignment (offset, align / BITS_PER_UNIT, &misalign))
1939 return -1;
1940 return misalign;
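/* Illustrative sketch (not part of the original source): a caller
   probing for 32-bit alignment of MEM might write

     int off = get_mem_align_offset (mem, 32);
     if (off == 0)
       ;  // XEXP (mem, 0) is known to be 32-bit aligned
     else if (off < 0)
       ;  // alignment could not be determined

   where a positive OFF is the byte distance back to the aligned
   boundary.  */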
1943 /* Given REF (a MEM) and T, either the type of X or the expression
1944 corresponding to REF, set the memory attributes. OBJECTP is nonzero
1945 if we are making a new object of this type. BITPOS is nonzero if
1946 there is an offset outstanding on T that will be applied later. */
1948 void
1949 set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
1950 poly_int64 bitpos)
1952 poly_int64 apply_bitpos = 0;
1953 tree type;
1954 class mem_attrs attrs, *defattrs, *refattrs;
1955 addr_space_t as;
1957 /* It can happen that type_for_mode was given a mode for which there
1958 is no language-level type. In which case it returns NULL, which
1959 we can see here. */
1960 if (t == NULL_TREE)
1961 return;
1963 type = TYPE_P (t) ? t : TREE_TYPE (t);
1964 if (type == error_mark_node)
1965 return;
1967 /* If we have already set DECL_RTL = ref, get_alias_set will get the
1968 wrong answer, as it assumes that DECL_RTL already has the right alias
1969 info. Callers should not set DECL_RTL until after the call to
1970 set_mem_attributes. */
1971 gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));
1973 /* Get the alias set from the expression or type (perhaps using a
1974 front-end routine) and use it. */
1975 attrs.alias = get_alias_set (t);
1977 MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
1978 MEM_POINTER (ref) = POINTER_TYPE_P (type);
1980 /* Default values from pre-existing memory attributes if present. */
1981 refattrs = MEM_ATTRS (ref);
1982 if (refattrs)
1984 /* ??? Can this ever happen? Calling this routine on a MEM that
1985 already carries memory attributes should probably be invalid. */
1986 attrs.expr = refattrs->expr;
1987 attrs.offset_known_p = refattrs->offset_known_p;
1988 attrs.offset = refattrs->offset;
1989 attrs.size_known_p = refattrs->size_known_p;
1990 attrs.size = refattrs->size;
1991 attrs.align = refattrs->align;
1994 /* Otherwise, default values from the mode of the MEM reference. */
1995 else
1997 defattrs = mode_mem_attrs[(int) GET_MODE (ref)];
1998 gcc_assert (!defattrs->expr);
1999 gcc_assert (!defattrs->offset_known_p);
2001 /* Respect mode size. */
2002 attrs.size_known_p = defattrs->size_known_p;
2003 attrs.size = defattrs->size;
2004 /* ??? Is this really necessary? We probably should always get
2005 the size from the type below. */
2007 /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
2008 if T is an object, always compute the object alignment below. */
2009 if (TYPE_P (t))
2010 attrs.align = defattrs->align;
2011 else
2012 attrs.align = BITS_PER_UNIT;
2013 /* ??? If T is a type, respecting mode alignment may *also* be wrong
2014 e.g. if the type carries an alignment attribute. Should we be
2015 able to simply always use TYPE_ALIGN? */
2018 /* We can set the alignment from the type if we are making an object or if
2019 this is an INDIRECT_REF. */
2020 if (objectp || TREE_CODE (t) == INDIRECT_REF)
2021 attrs.align = MAX (attrs.align, TYPE_ALIGN (type));
2023 /* If the size is known, we can set that. */
2024 tree new_size = TYPE_SIZE_UNIT (type);
2026 /* The address-space is that of the type. */
2027 as = TYPE_ADDR_SPACE (type);
2029 /* If T is not a type, we may be able to deduce some more information about
2030 the expression. */
2031 if (! TYPE_P (t))
2033 tree base;
2035 if (TREE_THIS_VOLATILE (t))
2036 MEM_VOLATILE_P (ref) = 1;
2038 /* Now remove any conversions: they don't change what the underlying
2039 object is. Likewise for SAVE_EXPR. */
2040 while (CONVERT_EXPR_P (t)
2041 || TREE_CODE (t) == VIEW_CONVERT_EXPR
2042 || TREE_CODE (t) == SAVE_EXPR)
2043 t = TREE_OPERAND (t, 0);
2045 /* Note whether this expression can trap. */
2046 MEM_NOTRAP_P (ref) = !tree_could_trap_p (t);
2048 base = get_base_address (t);
2049 if (base)
2051 if (DECL_P (base)
2052 && TREE_READONLY (base)
2053 && (TREE_STATIC (base) || DECL_EXTERNAL (base))
2054 && !TREE_THIS_VOLATILE (base))
2055 MEM_READONLY_P (ref) = 1;
2057 /* Mark static const strings readonly as well. */
2058 if (TREE_CODE (base) == STRING_CST
2059 && TREE_READONLY (base)
2060 && TREE_STATIC (base))
2061 MEM_READONLY_P (ref) = 1;
2063 /* Address-space information is on the base object. */
2064 if (TREE_CODE (base) == MEM_REF
2065 || TREE_CODE (base) == TARGET_MEM_REF)
2066 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (base,
2067 0))));
2068 else
2069 as = TYPE_ADDR_SPACE (TREE_TYPE (base));
2072 /* If this expression uses its parent's alias set, mark it such
2073 that we won't change it. */
2074 if (component_uses_parent_alias_set_from (t) != NULL_TREE)
2075 MEM_KEEP_ALIAS_SET_P (ref) = 1;
2077 /* If this is a decl, set the attributes of the MEM from it. */
2078 if (DECL_P (t))
2080 attrs.expr = t;
2081 attrs.offset_known_p = true;
2082 attrs.offset = 0;
2083 apply_bitpos = bitpos;
2084 new_size = DECL_SIZE_UNIT (t);
2087 /* ??? If we end up with a constant or a descriptor do not
2088 record a MEM_EXPR. */
2089 else if (CONSTANT_CLASS_P (t)
2090 || TREE_CODE (t) == CONSTRUCTOR)
2093 /* If this is a field reference, record it. */
2094 else if (TREE_CODE (t) == COMPONENT_REF)
2096 attrs.expr = t;
2097 attrs.offset_known_p = true;
2098 attrs.offset = 0;
2099 apply_bitpos = bitpos;
2100 if (DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
2101 new_size = DECL_SIZE_UNIT (TREE_OPERAND (t, 1));
2104 /* Else record it. */
2105 else
2107 gcc_assert (handled_component_p (t)
2108 || TREE_CODE (t) == MEM_REF
2109 || TREE_CODE (t) == TARGET_MEM_REF);
2110 attrs.expr = t;
2111 attrs.offset_known_p = true;
2112 attrs.offset = 0;
2113 apply_bitpos = bitpos;
2116 /* If this is a reference based on a partitioned decl replace the
2117 base with a MEM_REF of the pointer representative we created
2118 during stack slot partitioning. */
2119 if (attrs.expr
2120 && VAR_P (base)
2121 && ! is_global_var (base)
2122 && cfun->gimple_df->decls_to_pointers != NULL)
2124 tree *namep = cfun->gimple_df->decls_to_pointers->get (base);
2125 if (namep)
2127 attrs.expr = unshare_expr (attrs.expr);
2128 tree *orig_base = &attrs.expr;
2129 while (handled_component_p (*orig_base))
2130 orig_base = &TREE_OPERAND (*orig_base, 0);
2131 tree aptrt = reference_alias_ptr_type (*orig_base);
2132 *orig_base = build2 (MEM_REF, TREE_TYPE (*orig_base), *namep,
2133 build_int_cst (aptrt, 0));
2137 /* Compute the alignment. */
2138 unsigned int obj_align;
2139 unsigned HOST_WIDE_INT obj_bitpos;
2140 get_object_alignment_1 (t, &obj_align, &obj_bitpos);
2141 unsigned int diff_align = known_alignment (obj_bitpos - bitpos);
2142 if (diff_align != 0)
2143 obj_align = MIN (obj_align, diff_align);
2144 attrs.align = MAX (attrs.align, obj_align);
2147 poly_uint64 const_size;
2148 if (poly_int_tree_p (new_size, &const_size))
2150 attrs.size_known_p = true;
2151 attrs.size = const_size;
2154 /* If we modified OFFSET based on T, then subtract the outstanding
2155 bit position offset. Similarly, increase the size of the accessed
2156 object to contain the negative offset. */
2157 if (maybe_ne (apply_bitpos, 0))
2159 gcc_assert (attrs.offset_known_p);
2160 poly_int64 bytepos = bits_to_bytes_round_down (apply_bitpos);
2161 attrs.offset -= bytepos;
2162 if (attrs.size_known_p)
2163 attrs.size += bytepos;
2166 /* Now set the attributes we computed above. */
2167 attrs.addrspace = as;
2168 set_mem_attrs (ref, &attrs);
2171 void
2172 set_mem_attributes (rtx ref, tree t, int objectp)
2174 set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
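/* Illustrative sketch (not part of the original source): a typical use
   when building a memory reference for a declaration DECL, assuming
   ADDR is already a valid address rtx:

     rtx mem = gen_rtx_MEM (DECL_MODE (decl), addr);
     set_mem_attributes (mem, decl, 1);

   after which MEM carries the alias set, alignment, size and MEM_EXPR
   derived from DECL.  */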
2177 /* Set the alias set of MEM to SET. */
2179 void
2180 set_mem_alias_set (rtx mem, alias_set_type set)
2182 /* If the new and old alias sets don't conflict, something is wrong. */
2183 gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
2184 mem_attrs attrs (*get_mem_attrs (mem));
2185 attrs.alias = set;
2186 set_mem_attrs (mem, &attrs);
2189 /* Set the address space of MEM to ADDRSPACE (target-defined). */
2191 void
2192 set_mem_addr_space (rtx mem, addr_space_t addrspace)
2194 mem_attrs attrs (*get_mem_attrs (mem));
2195 attrs.addrspace = addrspace;
2196 set_mem_attrs (mem, &attrs);
2199 /* Set the alignment of MEM to ALIGN bits. */
2201 void
2202 set_mem_align (rtx mem, unsigned int align)
2204 mem_attrs attrs (*get_mem_attrs (mem));
2205 attrs.align = align;
2206 set_mem_attrs (mem, &attrs);
2209 /* Set the expr for MEM to EXPR. */
2211 void
2212 set_mem_expr (rtx mem, tree expr)
2214 mem_attrs attrs (*get_mem_attrs (mem));
2215 attrs.expr = expr;
2216 set_mem_attrs (mem, &attrs);
2219 /* Set the offset of MEM to OFFSET. */
2221 void
2222 set_mem_offset (rtx mem, poly_int64 offset)
2224 mem_attrs attrs (*get_mem_attrs (mem));
2225 attrs.offset_known_p = true;
2226 attrs.offset = offset;
2227 set_mem_attrs (mem, &attrs);
2230 /* Clear the offset of MEM. */
2232 void
2233 clear_mem_offset (rtx mem)
2235 mem_attrs attrs (*get_mem_attrs (mem));
2236 attrs.offset_known_p = false;
2237 set_mem_attrs (mem, &attrs);
2240 /* Set the size of MEM to SIZE. */
2242 void
2243 set_mem_size (rtx mem, poly_int64 size)
2245 mem_attrs attrs (*get_mem_attrs (mem));
2246 attrs.size_known_p = true;
2247 attrs.size = size;
2248 set_mem_attrs (mem, &attrs);
2251 /* Clear the size of MEM. */
2253 void
2254 clear_mem_size (rtx mem)
2256 mem_attrs attrs (*get_mem_attrs (mem));
2257 attrs.size_known_p = false;
2258 set_mem_attrs (mem, &attrs);
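/* Illustrative sketch (not part of the original source): the setters
   above all follow the same copy-modify-commit pattern, so adjusting
   several attributes is just a sequence of calls:

     set_mem_align (mem, 64);   // now known 64-bit aligned
     set_mem_size (mem, 8);     // access covers 8 bytes
     clear_mem_offset (mem);    // offset no longer known

   Each call rebuilds the (shared) mem_attrs through set_mem_attrs.  */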
2261 /* Return a memory reference like MEMREF, but with its mode changed to MODE
2262 and its address changed to ADDR. (VOIDmode means don't change the mode.
2263 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
2264 returned memory location is required to be valid. INPLACE is true if any
2265 changes can be made directly to MEMREF or false if MEMREF must be treated
2266 as immutable.
2268 The memory attributes are not changed. */
2270 static rtx
2271 change_address_1 (rtx memref, machine_mode mode, rtx addr, int validate,
2272 bool inplace)
2274 addr_space_t as;
2275 rtx new_rtx;
2277 gcc_assert (MEM_P (memref));
2278 as = MEM_ADDR_SPACE (memref);
2279 if (mode == VOIDmode)
2280 mode = GET_MODE (memref);
2281 if (addr == 0)
2282 addr = XEXP (memref, 0);
2283 if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
2284 && (!validate || memory_address_addr_space_p (mode, addr, as)))
2285 return memref;
2287 /* Don't validate the address for LRA. LRA can make the address valid
2288 by itself in the most efficient way. */
2289 if (validate && !lra_in_progress)
2291 if (reload_in_progress || reload_completed)
2292 gcc_assert (memory_address_addr_space_p (mode, addr, as));
2293 else
2294 addr = memory_address_addr_space (mode, addr, as);
2297 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
2298 return memref;
2300 if (inplace)
2302 XEXP (memref, 0) = addr;
2303 return memref;
2306 new_rtx = gen_rtx_MEM (mode, addr);
2307 MEM_COPY_ATTRIBUTES (new_rtx, memref);
2308 return new_rtx;
2311 /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
2312 way we are changing MEMREF, so we only preserve the alias set. */
2315 change_address (rtx memref, machine_mode mode, rtx addr)
2317 rtx new_rtx = change_address_1 (memref, mode, addr, 1, false);
2318 machine_mode mmode = GET_MODE (new_rtx);
2319 class mem_attrs *defattrs;
2321 mem_attrs attrs (*get_mem_attrs (memref));
2322 defattrs = mode_mem_attrs[(int) mmode];
2323 attrs.expr = NULL_TREE;
2324 attrs.offset_known_p = false;
2325 attrs.size_known_p = defattrs->size_known_p;
2326 attrs.size = defattrs->size;
2327 attrs.align = defattrs->align;
2329 /* If there are no changes, just return the original memory reference. */
2330 if (new_rtx == memref)
2332 if (mem_attrs_eq_p (get_mem_attrs (memref), &attrs))
2333 return new_rtx;
2335 new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
2336 MEM_COPY_ATTRIBUTES (new_rtx, memref);
2339 set_mem_attrs (new_rtx, &attrs);
2340 return new_rtx;
2343 /* Return a memory reference like MEMREF, but with its mode changed
2344 to MODE and its address offset by OFFSET bytes. If VALIDATE is
2345 nonzero, the memory address is forced to be valid.
2346 If ADJUST_ADDRESS is zero, OFFSET is only used to update MEM_ATTRS
2347 and the caller is responsible for adjusting MEMREF base register.
2348 If ADJUST_OBJECT is zero, the underlying object associated with the
2349 memory reference is left unchanged and the caller is responsible for
2350 dealing with it. Otherwise, if the new memory reference is outside
2351 the underlying object, even partially, then the object is dropped.
2352 SIZE, if nonzero, is the size of an access in cases where MODE
2353 has no inherent size. */
2356 adjust_address_1 (rtx memref, machine_mode mode, poly_int64 offset,
2357 int validate, int adjust_address, int adjust_object,
2358 poly_int64 size)
2360 rtx addr = XEXP (memref, 0);
2361 rtx new_rtx;
2362 scalar_int_mode address_mode;
2363 class mem_attrs attrs (*get_mem_attrs (memref)), *defattrs;
2364 unsigned HOST_WIDE_INT max_align;
2365 #ifdef POINTERS_EXTEND_UNSIGNED
2366 scalar_int_mode pointer_mode
2367 = targetm.addr_space.pointer_mode (attrs.addrspace);
2368 #endif
2370 /* VOIDmode means no mode change for change_address_1. */
2371 if (mode == VOIDmode)
2372 mode = GET_MODE (memref);
2374 /* Take the size of non-BLKmode accesses from the mode. */
2375 defattrs = mode_mem_attrs[(int) mode];
2376 if (defattrs->size_known_p)
2377 size = defattrs->size;
2379 /* If there are no changes, just return the original memory reference. */
2380 if (mode == GET_MODE (memref)
2381 && known_eq (offset, 0)
2382 && (known_eq (size, 0)
2383 || (attrs.size_known_p && known_eq (attrs.size, size)))
2384 && (!validate || memory_address_addr_space_p (mode, addr,
2385 attrs.addrspace)))
2386 return memref;
2388 /* ??? Prefer to create garbage instead of creating shared rtl.
2389 This may happen even if offset is nonzero -- consider
2390 (plus (plus reg reg) const_int) -- so do this always. */
2391 addr = copy_rtx (addr);
2393 /* Convert a possibly large offset to a signed value within the
2394 range of the target address space. */
2395 address_mode = get_address_mode (memref);
2396 offset = trunc_int_for_mode (offset, address_mode);
2398 if (adjust_address)
2400 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
2401 object, we can merge it into the LO_SUM. */
2402 if (GET_MODE (memref) != BLKmode
2403 && GET_CODE (addr) == LO_SUM
2404 && known_in_range_p (offset,
2405 0, (GET_MODE_ALIGNMENT (GET_MODE (memref))
2406 / BITS_PER_UNIT)))
2407 addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
2408 plus_constant (address_mode,
2409 XEXP (addr, 1), offset));
2410 #ifdef POINTERS_EXTEND_UNSIGNED
2411 /* If MEMREF is a ZERO_EXTEND from pointer_mode and the offset is valid
2412 in that mode, we merge it into the ZERO_EXTEND. We take advantage of
2413 the fact that pointers are not allowed to overflow. */
2414 else if (POINTERS_EXTEND_UNSIGNED > 0
2415 && GET_CODE (addr) == ZERO_EXTEND
2416 && GET_MODE (XEXP (addr, 0)) == pointer_mode
2417 && known_eq (trunc_int_for_mode (offset, pointer_mode), offset))
2418 addr = gen_rtx_ZERO_EXTEND (address_mode,
2419 plus_constant (pointer_mode,
2420 XEXP (addr, 0), offset));
2421 #endif
2422 else
2423 addr = plus_constant (address_mode, addr, offset);
2426 new_rtx = change_address_1 (memref, mode, addr, validate, false);
2428 /* If the address is a REG, change_address_1 rightfully returns memref,
2429 but this would destroy memref's MEM_ATTRS. */
2430 if (new_rtx == memref && maybe_ne (offset, 0))
2431 new_rtx = copy_rtx (new_rtx);
2433 /* Conservatively drop the object if we don't know where we start from. */
2434 if (adjust_object && (!attrs.offset_known_p || !attrs.size_known_p))
2436 attrs.expr = NULL_TREE;
2437 attrs.alias = 0;
2440 /* Compute the new values of the memory attributes due to this adjustment.
2441 We add the offsets and update the alignment. */
2442 if (attrs.offset_known_p)
2444 attrs.offset += offset;
2446 /* Drop the object if the new left end is not within its bounds. */
2447 if (adjust_object && maybe_lt (attrs.offset, 0))
2449 attrs.expr = NULL_TREE;
2450 attrs.alias = 0;
2454 /* Compute the new alignment by taking the MIN of the alignment and the
2455 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
2456 is zero. */
2457 if (maybe_ne (offset, 0))
2459 max_align = known_alignment (offset) * BITS_PER_UNIT;
2460 attrs.align = MIN (attrs.align, max_align);
2463 if (maybe_ne (size, 0))
2465 /* Drop the object if the new right end is not within its bounds. */
2466 if (adjust_object && maybe_gt (offset + size, attrs.size))
2468 attrs.expr = NULL_TREE;
2469 attrs.alias = 0;
2471 attrs.size_known_p = true;
2472 attrs.size = size;
2474 else if (attrs.size_known_p)
2476 gcc_assert (!adjust_object);
2477 attrs.size -= offset;
2478 /* ??? The store_by_pieces machinery generates negative sizes,
2479 so don't assert for that here. */
2482 set_mem_attrs (new_rtx, &attrs);
2484 return new_rtx;
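/* Illustrative sketch (not part of the original source): most callers
   reach this function through the adjust_address and adjust_address_nv
   wrapper macros, e.g.

     rtx hi_word = adjust_address (mem, SImode, 4);

   which forms an SImode reference 4 bytes past MEM and forces the
   resulting address to be valid.  */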
2487 /* Return a memory reference like MEMREF, but with its mode changed
2488 to MODE and its address changed to ADDR, which is assumed to be
2489 MEMREF offset by OFFSET bytes. If VALIDATE is
2490 nonzero, the memory address is forced to be valid. */
2493 adjust_automodify_address_1 (rtx memref, machine_mode mode, rtx addr,
2494 poly_int64 offset, int validate)
2496 memref = change_address_1 (memref, VOIDmode, addr, validate, false);
2497 return adjust_address_1 (memref, mode, offset, validate, 0, 0, 0);
2500 /* Return a memory reference like MEMREF, but whose address is changed by
2501 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2502 known to be in OFFSET (possibly 1). */
2505 offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
2507 rtx new_rtx, addr = XEXP (memref, 0);
2508 machine_mode address_mode;
2509 class mem_attrs *defattrs;
2511 mem_attrs attrs (*get_mem_attrs (memref));
2512 address_mode = get_address_mode (memref);
2513 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2515 /* At this point we don't know _why_ the address is invalid. It
2516 could have secondary memory references, multiplies or anything.
2518 However, if we did go and rearrange things, we can wind up not
2519 being able to recognize the magic around pic_offset_table_rtx.
2520 This stuff is fragile, and is yet another example of why it is
2521 bad to expose PIC machinery too early. */
2522 if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx,
2523 attrs.addrspace)
2524 && GET_CODE (addr) == PLUS
2525 && XEXP (addr, 0) == pic_offset_table_rtx)
2527 addr = force_reg (GET_MODE (addr), addr);
2528 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2531 update_temp_slot_address (XEXP (memref, 0), new_rtx);
2532 new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1, false);
2534 /* If there are no changes, just return the original memory reference. */
2535 if (new_rtx == memref)
2536 return new_rtx;
2538 /* Update the alignment to reflect the offset. Reset the offset, which
2539 we don't know. */
2540 defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
2541 attrs.offset_known_p = false;
2542 attrs.size_known_p = defattrs->size_known_p;
2543 attrs.size = defattrs->size;
2544 attrs.align = MIN (attrs.align, pow2 * BITS_PER_UNIT);
2545 set_mem_attrs (new_rtx, &attrs);
2546 return new_rtx;
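/* Illustrative sketch (not part of the original source): for an access
   whose byte index lives in REG and is known to be a multiple of 4,
   one might write

     rtx elt = offset_address (mem, reg, 4);

   POW2 = 4 tells the routine that at most 32-bit alignment survives
   the variable offset.  */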
2549 /* Return a memory reference like MEMREF, but with its address changed to
2550 ADDR. The caller is asserting that the actual piece of memory pointed
2551 to is the same, just the form of the address is being changed, such as
2552 by putting something into a register. INPLACE is true if any changes
2553 can be made directly to MEMREF or false if MEMREF must be treated as
2554 immutable. */
2557 replace_equiv_address (rtx memref, rtx addr, bool inplace)
2559 /* change_address_1 copies the memory attribute structure without change
2560 and that's exactly what we want here. */
2561 update_temp_slot_address (XEXP (memref, 0), addr);
2562 return change_address_1 (memref, VOIDmode, addr, 1, inplace);
2565 /* Likewise, but the reference is not required to be valid. */
2568 replace_equiv_address_nv (rtx memref, rtx addr, bool inplace)
2570 return change_address_1 (memref, VOIDmode, addr, 0, inplace);
2573 /* Return a memory reference like MEMREF, but with its mode widened to
2574 MODE and offset by OFFSET. This would be used by targets that e.g.
2575 cannot issue QImode memory operations and have to use SImode memory
2576 operations plus masking logic. */
2579 widen_memory_access (rtx memref, machine_mode mode, poly_int64 offset)
2581 rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1, 0, 0);
2582 poly_uint64 size = GET_MODE_SIZE (mode);
2584 /* If there are no changes, just return the original memory reference. */
2585 if (new_rtx == memref)
2586 return new_rtx;
2588 mem_attrs attrs (*get_mem_attrs (new_rtx));
2590 /* If we don't know what offset we were at within the expression, then
2591 we can't know if we've overstepped the bounds. */
2592 if (! attrs.offset_known_p)
2593 attrs.expr = NULL_TREE;
2595 while (attrs.expr)
2597 if (TREE_CODE (attrs.expr) == COMPONENT_REF)
2599 tree field = TREE_OPERAND (attrs.expr, 1);
2600 tree offset = component_ref_field_offset (attrs.expr);
2602 if (! DECL_SIZE_UNIT (field))
2604 attrs.expr = NULL_TREE;
2605 break;
2608 /* Is the field at least as large as the access? If so, ok,
2609 otherwise strip back to the containing structure. */
2610 if (poly_int_tree_p (DECL_SIZE_UNIT (field))
2611 && known_ge (wi::to_poly_offset (DECL_SIZE_UNIT (field)), size)
2612 && known_ge (attrs.offset, 0))
2613 break;
2615 poly_uint64 suboffset;
2616 if (!poly_int_tree_p (offset, &suboffset))
2618 attrs.expr = NULL_TREE;
2619 break;
2622 attrs.expr = TREE_OPERAND (attrs.expr, 0);
2623 attrs.offset += suboffset;
2624 attrs.offset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
2625 / BITS_PER_UNIT);
2627 /* Similarly for the decl. */
2628 else if (DECL_P (attrs.expr)
2629 && DECL_SIZE_UNIT (attrs.expr)
2630 && poly_int_tree_p (DECL_SIZE_UNIT (attrs.expr))
2631 && known_ge (wi::to_poly_offset (DECL_SIZE_UNIT (attrs.expr)),
2632 size)
2633 && known_ge (attrs.offset, 0))
2634 break;
2635 else
2637 /* The widened memory access overflows the expression, which means
2638 that it could alias another expression. Zap it. */
2639 attrs.expr = NULL_TREE;
2640 break;
2644 if (! attrs.expr)
2645 attrs.offset_known_p = false;
2647 /* The widened memory may alias other stuff, so zap the alias set. */
2648 /* ??? Maybe use get_alias_set on any remaining expression. */
2649 attrs.alias = 0;
2650 attrs.size_known_p = true;
2651 attrs.size = size;
2652 set_mem_attrs (new_rtx, &attrs);
2653 return new_rtx;
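/* Illustrative sketch (not part of the original source): a target that
   cannot issue QImode loads might widen a byte access to a word with

     rtx wide = widen_memory_access (byte_mem, SImode, 0);

   and extract the byte by shifting and masking; note that WIDE comes
   back with alias set 0 because it may now overlap other objects.  */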
2656 /* A fake decl that is used as the MEM_EXPR of spill slots. */
2657 static GTY(()) tree spill_slot_decl;
2659 tree
2660 get_spill_slot_decl (bool force_build_p)
2662 tree d = spill_slot_decl;
2663 rtx rd;
2665 if (d || !force_build_p)
2666 return d;
2668 d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
2669 VAR_DECL, get_identifier ("%sfp"), void_type_node);
2670 DECL_ARTIFICIAL (d) = 1;
2671 DECL_IGNORED_P (d) = 1;
2672 TREE_USED (d) = 1;
2673 spill_slot_decl = d;
2675 rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
2676 MEM_NOTRAP_P (rd) = 1;
2677 mem_attrs attrs (*mode_mem_attrs[(int) BLKmode]);
2678 attrs.alias = new_alias_set ();
2679 attrs.expr = d;
2680 set_mem_attrs (rd, &attrs);
2681 SET_DECL_RTL (d, rd);
2683 return d;
2686 /* Given MEM, a result from assign_stack_local, fill in the memory
2687 attributes as appropriate for a register allocator spill slot.
2688 These slots are not aliasable by other memory. We arrange for
2689 them all to use a single MEM_EXPR, so that the aliasing code can
2690 work properly in the case of shared spill slots. */
2692 void
2693 set_mem_attrs_for_spill (rtx mem)
2695 rtx addr;
2697 mem_attrs attrs (*get_mem_attrs (mem));
2698 attrs.expr = get_spill_slot_decl (true);
2699 attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr));
2700 attrs.addrspace = ADDR_SPACE_GENERIC;
2702 /* We expect the incoming memory to be of the form:
2703 (mem:MODE (plus (reg sfp) (const_int offset)))
2704 with perhaps the plus missing for offset = 0. */
2705 addr = XEXP (mem, 0);
2706 attrs.offset_known_p = true;
2707 strip_offset (addr, &attrs.offset);
2709 set_mem_attrs (mem, &attrs);
2710 MEM_NOTRAP_P (mem) = 1;
2713 /* Return a newly created CODE_LABEL rtx with a unique label number. */
2715 rtx_code_label *
2716 gen_label_rtx (void)
2718 return as_a <rtx_code_label *> (
2719 gen_rtx_CODE_LABEL (VOIDmode, NULL_RTX, NULL_RTX,
2720 NULL, label_num++, NULL));
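/* Illustrative sketch (not part of the original source): a fresh label
   is typically paired with emit_label at the jump target, e.g.

     rtx_code_label *done = gen_label_rtx ();
     emit_cmp_and_jump_insns (x, const0_rtx, EQ, NULL_RTX, mode, 0, done);
     // ... emit the code for the nonzero case ...
     emit_label (done);  */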
2723 /* For procedure integration. */
2725 /* Install new pointers to the first and last insns in the chain.
2726 Also, set cur_insn_uid to one higher than the last in use.
2727 Used for an inline-procedure after copying the insn chain. */
2729 void
2730 set_new_first_and_last_insn (rtx_insn *first, rtx_insn *last)
2732 rtx_insn *insn;
2734 set_first_insn (first);
2735 set_last_insn (last);
2736 cur_insn_uid = 0;
2738 if (param_min_nondebug_insn_uid || MAY_HAVE_DEBUG_INSNS)
2740 int debug_count = 0;
2742 cur_insn_uid = param_min_nondebug_insn_uid - 1;
2743 cur_debug_insn_uid = 0;
2745 for (insn = first; insn; insn = NEXT_INSN (insn))
2746 if (INSN_UID (insn) < param_min_nondebug_insn_uid)
2747 cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
2748 else
2750 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2751 if (DEBUG_INSN_P (insn))
2752 debug_count++;
2755 if (debug_count)
2756 cur_debug_insn_uid = param_min_nondebug_insn_uid + debug_count;
2757 else
2758 cur_debug_insn_uid++;
2760 else
2761 for (insn = first; insn; insn = NEXT_INSN (insn))
2762 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2764 cur_insn_uid++;
2767 /* Go through all the RTL insn bodies and copy any invalid shared
2768 structure. This routine should only be called once. */
2770 static void
2771 unshare_all_rtl_1 (rtx_insn *insn)
2773 /* Unshare just about everything else. */
2774 unshare_all_rtl_in_chain (insn);
2776 /* Make sure the addresses of stack slots found outside the insn chain
2777 (such as, in DECL_RTL of a variable) are not shared
2778 with the insn chain.
2780 This special care is necessary when the stack slot MEM does not
2781 actually appear in the insn chain. If it does appear, its address
2782 is unshared from all else at that point. */
2783 unsigned int i;
2784 rtx temp;
2785 FOR_EACH_VEC_SAFE_ELT (stack_slot_list, i, temp)
2786 (*stack_slot_list)[i] = copy_rtx_if_shared (temp);
2789 /* Go through all the RTL insn bodies and copy any invalid shared
2790 structure, again. This is a fairly expensive thing to do so it
2791 should be done sparingly. */
2793 void
2794 unshare_all_rtl_again (rtx_insn *insn)
2796 rtx_insn *p;
2797 tree decl;
2799 for (p = insn; p; p = NEXT_INSN (p))
2800 if (INSN_P (p))
2802 reset_used_flags (PATTERN (p));
2803 reset_used_flags (REG_NOTES (p));
2804 if (CALL_P (p))
2805 reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
2808 /* Make sure that virtual stack slots are not shared. */
2809 set_used_decls (DECL_INITIAL (cfun->decl));
2811 /* Make sure that virtual parameters are not shared. */
2812 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
2813 set_used_flags (DECL_RTL (decl));
2815 rtx temp;
2816 unsigned int i;
2817 FOR_EACH_VEC_SAFE_ELT (stack_slot_list, i, temp)
2818 reset_used_flags (temp);
2820 unshare_all_rtl_1 (insn);
2823 void
2824 unshare_all_rtl (void)
2826 unshare_all_rtl_1 (get_insns ());
2828 for (tree decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
2830 if (DECL_RTL_SET_P (decl))
2831 SET_DECL_RTL (decl, copy_rtx_if_shared (DECL_RTL (decl)));
2832 DECL_INCOMING_RTL (decl) = copy_rtx_if_shared (DECL_INCOMING_RTL (decl));
2837 /* Check that ORIG is not marked when it should not be and mark ORIG as in use.
2838 Recursively do the same for subexpressions. */
2840 static void
2841 verify_rtx_sharing (rtx orig, rtx insn)
2843 rtx x = orig;
2844 int i;
2845 enum rtx_code code;
2846 const char *format_ptr;
2848 if (x == 0)
2849 return;
2851 code = GET_CODE (x);
2853 /* These types may be freely shared. */
2855 switch (code)
2857 case REG:
2858 case DEBUG_EXPR:
2859 case VALUE:
2860 CASE_CONST_ANY:
2861 case SYMBOL_REF:
2862 case LABEL_REF:
2863 case CODE_LABEL:
2864 case PC:
2865 case RETURN:
2866 case SIMPLE_RETURN:
2867 case SCRATCH:
2868 /* SCRATCHes must be shared because they represent distinct values. */
2869 return;
2870 case CLOBBER:
2871 /* Share clobbers of hard registers, but do not share pseudo reg
2872 clobbers or clobbers of hard registers that originated as pseudos.
2873 This is needed to allow safe register renaming. */
2874 if (REG_P (XEXP (x, 0))
2875 && HARD_REGISTER_NUM_P (REGNO (XEXP (x, 0)))
2876 && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (x, 0))))
2877 return;
2878 break;
2880 case CONST:
2881 if (shared_const_p (orig))
2882 return;
2883 break;
2885 case MEM:
2886 /* A MEM is allowed to be shared if its address is constant. */
2887 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2888 || reload_completed || reload_in_progress)
2889 return;
2891 break;
2893 default:
2894 break;
2897 /* This rtx may not be shared. If it has already been seen,
2898 replace it with a copy of itself. */
2899 if (flag_checking && RTX_FLAG (x, used))
2901 error ("invalid rtl sharing found in the insn");
2902 debug_rtx (insn);
2903 error ("shared rtx");
2904 debug_rtx (x);
2905 internal_error ("internal consistency failure");
2907 gcc_assert (!RTX_FLAG (x, used));
2909 RTX_FLAG (x, used) = 1;
2911 /* Now scan the subexpressions recursively. */
2913 format_ptr = GET_RTX_FORMAT (code);
2915 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2917 switch (*format_ptr++)
2919 case 'e':
2920 verify_rtx_sharing (XEXP (x, i), insn);
2921 break;
2923 case 'E':
2924 if (XVEC (x, i) != NULL)
2926 int j;
2927 int len = XVECLEN (x, i);
2929 for (j = 0; j < len; j++)
2931 /* We allow sharing of ASM_OPERANDS inside a single
2932 instruction. */
2933 if (j && GET_CODE (XVECEXP (x, i, j)) == SET
2934 && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2935 == ASM_OPERANDS))
2936 verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2937 else
2938 verify_rtx_sharing (XVECEXP (x, i, j), insn);
2941 break;
2946 /* Reset used-flags for INSN. */
2948 static void
2949 reset_insn_used_flags (rtx insn)
2951 gcc_assert (INSN_P (insn));
2952 reset_used_flags (PATTERN (insn));
2953 reset_used_flags (REG_NOTES (insn));
2954 if (CALL_P (insn))
2955 reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn));
2958 /* Go through all the RTL insn bodies and clear all the USED bits. */
2960 static void
2961 reset_all_used_flags (void)
2963 rtx_insn *p;
2965 for (p = get_insns (); p; p = NEXT_INSN (p))
2966 if (INSN_P (p))
2968 rtx pat = PATTERN (p);
2969 if (GET_CODE (pat) != SEQUENCE)
2970 reset_insn_used_flags (p);
2971 else
2973 gcc_assert (REG_NOTES (p) == NULL);
2974 for (int i = 0; i < XVECLEN (pat, 0); i++)
2976 rtx insn = XVECEXP (pat, 0, i);
2977 if (INSN_P (insn))
2978 reset_insn_used_flags (insn);
2984 /* Verify sharing in INSN. */
2986 static void
2987 verify_insn_sharing (rtx insn)
2989 gcc_assert (INSN_P (insn));
2990 verify_rtx_sharing (PATTERN (insn), insn);
2991 verify_rtx_sharing (REG_NOTES (insn), insn);
2992 if (CALL_P (insn))
2993 verify_rtx_sharing (CALL_INSN_FUNCTION_USAGE (insn), insn);
2996 /* Go through all the RTL insn bodies and check that there is no unexpected
2997 sharing in between the subexpressions. */
2999 DEBUG_FUNCTION void
3000 verify_rtl_sharing (void)
3002 rtx_insn *p;
3004 timevar_push (TV_VERIFY_RTL_SHARING);
3006 reset_all_used_flags ();
3008 for (p = get_insns (); p; p = NEXT_INSN (p))
3009 if (INSN_P (p))
3011 rtx pat = PATTERN (p);
3012 if (GET_CODE (pat) != SEQUENCE)
3013 verify_insn_sharing (p);
3014 else
3015 for (int i = 0; i < XVECLEN (pat, 0); i++)
3017 rtx insn = XVECEXP (pat, 0, i);
3018 if (INSN_P (insn))
3019 verify_insn_sharing (insn);
3023 reset_all_used_flags ();
3025 timevar_pop (TV_VERIFY_RTL_SHARING);
3028 /* Go through all the RTL insn bodies and copy any invalid shared structure.
3029 Assumes the mark bits are cleared at entry. */
3031 void
3032 unshare_all_rtl_in_chain (rtx_insn *insn)
3034 for (; insn; insn = NEXT_INSN (insn))
3035 if (INSN_P (insn))
3037 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
3038 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
3039 if (CALL_P (insn))
3040 CALL_INSN_FUNCTION_USAGE (insn)
3041 = copy_rtx_if_shared (CALL_INSN_FUNCTION_USAGE (insn));
3045 /* Go through all virtual stack slots of a function and mark them as
3046 shared. We never replace the DECL_RTLs themselves with a copy,
3047 but expressions mentioned in a DECL_RTL cannot be shared with
3048 expressions in the instruction stream.
3050 Note that reload may convert pseudo registers into memories in-place.
3051 Pseudo registers are always shared, but MEMs never are. Thus if we
3052 reset the used flags on MEMs in the instruction stream, we must set
3053 them again on MEMs that appear in DECL_RTLs. */
3055 static void
3056 set_used_decls (tree blk)
3058 tree t;
3060 /* Mark decls. */
3061 for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
3062 if (DECL_RTL_SET_P (t))
3063 set_used_flags (DECL_RTL (t));
3065 /* Now process sub-blocks. */
3066 for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
3067 set_used_decls (t);
3070 /* Mark ORIG as in use, and return a copy of it if it was already in use.
3071 Recursively does the same for subexpressions. Uses
3072 copy_rtx_if_shared_1 to reduce stack space. */
3075 copy_rtx_if_shared (rtx orig)
3077 copy_rtx_if_shared_1 (&orig);
3078 return orig;
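/* Illustrative sketch (not part of the original source): the used-flag
   protocol always pairs a reset with a copy pass, e.g.

     reset_used_flags (pat);
     pat = copy_rtx_if_shared (pat);

   The first walk clears the mark bits; the second marks every subrtx
   and replaces any rtx it sees a second time with a copy.  */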
3081 /* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
3082 use. Recursively does the same for subexpressions. */
3084 static void
3085 copy_rtx_if_shared_1 (rtx *orig1)
3087 rtx x;
3088 int i;
3089 enum rtx_code code;
3090 rtx *last_ptr;
3091 const char *format_ptr;
3092 int copied = 0;
3093 int length;
3095 /* Repeat is used to turn tail-recursion into iteration. */
3096 repeat:
3097 x = *orig1;
3099 if (x == 0)
3100 return;
3102 code = GET_CODE (x);
3104 /* These types may be freely shared. */
3106 switch (code)
3108 case REG:
3109 case DEBUG_EXPR:
3110 case VALUE:
3111 CASE_CONST_ANY:
3112 case SYMBOL_REF:
3113 case LABEL_REF:
3114 case CODE_LABEL:
3115 case PC:
3116 case RETURN:
3117 case SIMPLE_RETURN:
3118 case SCRATCH:
3119 /* SCRATCHes must be shared because they represent distinct values. */
3120 return;
3121 case CLOBBER:
3122 /* Share clobbers of hard registers, but do not share pseudo reg
3123 clobbers or clobbers of hard registers that originated as pseudos.
3124 This is needed to allow safe register renaming. */
3125 if (REG_P (XEXP (x, 0))
3126 && HARD_REGISTER_NUM_P (REGNO (XEXP (x, 0)))
3127 && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (x, 0))))
3128 return;
3129 break;
3131 case CONST:
3132 if (shared_const_p (x))
3133 return;
3134 break;
3136 case DEBUG_INSN:
3137 case INSN:
3138 case JUMP_INSN:
3139 case CALL_INSN:
3140 case NOTE:
3141 case BARRIER:
3142 /* The chain of insns is not being copied. */
3143 return;
3145 default:
3146 break;
3149 /* This rtx may not be shared. If it has already been seen,
3150 replace it with a copy of itself. */
3152 if (RTX_FLAG (x, used))
3154 x = shallow_copy_rtx (x);
3155 copied = 1;
3157 RTX_FLAG (x, used) = 1;
3159 /* Now scan the subexpressions recursively.
3160 We can store any replaced subexpressions directly into X
3161 since we know X is not shared! Any vectors in X
3162 must be copied if X was copied. */
3164 format_ptr = GET_RTX_FORMAT (code);
3165 length = GET_RTX_LENGTH (code);
3166 last_ptr = NULL;
3168 for (i = 0; i < length; i++)
3170 switch (*format_ptr++)
3172 case 'e':
3173 if (last_ptr)
3174 copy_rtx_if_shared_1 (last_ptr);
3175 last_ptr = &XEXP (x, i);
3176 break;
3178 case 'E':
3179 if (XVEC (x, i) != NULL)
3181 int j;
3182 int len = XVECLEN (x, i);
3184 /* Copy the vector iff I copied the rtx and the length
3185 is nonzero. */
3186 if (copied && len > 0)
3187 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
3189 /* Call recursively on all inside the vector. */
3190 for (j = 0; j < len; j++)
3192 if (last_ptr)
3193 copy_rtx_if_shared_1 (last_ptr);
3194 last_ptr = &XVECEXP (x, i, j);
3197 break;
3200 *orig1 = x;
3201 if (last_ptr)
3203 orig1 = last_ptr;
3204 goto repeat;
3208 /* Set the USED bit in X and its non-shareable subparts to FLAG. */
3210 static void
3211 mark_used_flags (rtx x, int flag)
3213 int i, j;
3214 enum rtx_code code;
3215 const char *format_ptr;
3216 int length;
3218 /* Repeat is used to turn tail-recursion into iteration. */
3219 repeat:
3220 if (x == 0)
3221 return;
3223 code = GET_CODE (x);
3225 /* These types may be freely shared so we needn't do any resetting
3226 for them. */
3228 switch (code)
3230 case REG:
3231 case DEBUG_EXPR:
3232 case VALUE:
3233 CASE_CONST_ANY:
3234 case SYMBOL_REF:
3235 case CODE_LABEL:
3236 case PC:
3237 case RETURN:
3238 case SIMPLE_RETURN:
3239 return;
3241 case DEBUG_INSN:
3242 case INSN:
3243 case JUMP_INSN:
3244 case CALL_INSN:
3245 case NOTE:
3246 case LABEL_REF:
3247 case BARRIER:
3248 /* The chain of insns is not being copied. */
3249 return;
3251 default:
3252 break;
3255 RTX_FLAG (x, used) = flag;
3257 format_ptr = GET_RTX_FORMAT (code);
3258 length = GET_RTX_LENGTH (code);
3260 for (i = 0; i < length; i++)
3262 switch (*format_ptr++)
3264 case 'e':
3265 if (i == length-1)
3267 x = XEXP (x, i);
3268 goto repeat;
3270 mark_used_flags (XEXP (x, i), flag);
3271 break;
3273 case 'E':
3274 for (j = 0; j < XVECLEN (x, i); j++)
3275 mark_used_flags (XVECEXP (x, i, j), flag);
3276 break;
3281 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
3282 to look for shared sub-parts. */
3284 void
3285 reset_used_flags (rtx x)
3287 mark_used_flags (x, 0);
3290 /* Set all the USED bits in X to allow copy_rtx_if_shared to be used
3291 to look for shared sub-parts. */
3293 void
3294 set_used_flags (rtx x)
3296 mark_used_flags (x, 1);
3299 /* Copy X if necessary so that it won't be altered by changes in OTHER.
3300 Return X or the rtx for the pseudo reg the value of X was copied into.
3301 OTHER must be valid as a SET_DEST. */
3304 make_safe_from (rtx x, rtx other)
3306 while (1)
3307 switch (GET_CODE (other))
3309 case SUBREG:
3310 other = SUBREG_REG (other);
3311 break;
3312 case STRICT_LOW_PART:
3313 case SIGN_EXTEND:
3314 case ZERO_EXTEND:
3315 other = XEXP (other, 0);
3316 break;
3317 default:
3318 goto done;
3320 done:
3321 if ((MEM_P (other)
3322 && ! CONSTANT_P (x)
3323 && !REG_P (x)
3324 && GET_CODE (x) != SUBREG)
3325 || (REG_P (other)
3326 && (REGNO (other) < FIRST_PSEUDO_REGISTER
3327 || reg_mentioned_p (other, x))))
3329 rtx temp = gen_reg_rtx (GET_MODE (x));
3330 emit_move_insn (temp, x);
3331 return temp;
3333 return x;
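/* Illustrative sketch (not part of the original source): before
   storing into TARGET while X is still live, a caller can write

     x = make_safe_from (x, target);
     emit_move_insn (target, x);

   so that X is first moved into a fresh pseudo whenever the store to
   TARGET could clobber part of it.  */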
3336 /* Emission of insns (adding them to the doubly-linked list). */
3338 /* Return the last insn emitted, even if it is in a sequence now pushed. */
3340 rtx_insn *
3341 get_last_insn_anywhere (void)
3343 struct sequence_stack *seq;
3344 for (seq = get_current_sequence (); seq; seq = seq->next)
3345 if (seq->last != 0)
3346 return seq->last;
3347 return 0;
3350 /* Return the first nonnote insn emitted in current sequence or current
3351 function. This routine looks inside SEQUENCEs. */
3353 rtx_insn *
3354 get_first_nonnote_insn (void)
3356 rtx_insn *insn = get_insns ();
3358 if (insn)
3360 if (NOTE_P (insn))
3361 for (insn = next_insn (insn);
3362 insn && NOTE_P (insn);
3363 insn = next_insn (insn))
3364 continue;
3365 else
3367 if (NONJUMP_INSN_P (insn)
3368 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3369 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3373 return insn;
3376 /* Return the last nonnote insn emitted in current sequence or current
3377 function. This routine looks inside SEQUENCEs. */
3379 rtx_insn *
3380 get_last_nonnote_insn (void)
3382 rtx_insn *insn = get_last_insn ();
3384 if (insn)
3386 if (NOTE_P (insn))
3387 for (insn = previous_insn (insn);
3388 insn && NOTE_P (insn);
3389 insn = previous_insn (insn))
3390 continue;
3391 else
3393 if (NONJUMP_INSN_P (insn))
3394 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
3395 insn = seq->insn (seq->len () - 1);
3399 return insn;
3402 /* Return the number of actual (non-debug) insns emitted in this
3403 function. */
3406 get_max_insn_count (void)
3408 int n = cur_insn_uid;
3410 /* The table size must be stable across -g, to avoid codegen
3411 differences due to debug insns, and not be affected by
3412 -fmin-insn-uid, to avoid excessive table size and to simplify
3413 debugging of -fcompare-debug failures. */
3414 if (cur_debug_insn_uid > param_min_nondebug_insn_uid)
3415 n -= cur_debug_insn_uid;
3416 else
3417 n -= param_min_nondebug_insn_uid;
3419 return n;
3423 /* Return the next insn. If it is a SEQUENCE, return the first insn
3424 of the sequence. */
3426 rtx_insn *
3427 next_insn (rtx_insn *insn)
3429 if (insn)
3431 insn = NEXT_INSN (insn);
3432 if (insn && NONJUMP_INSN_P (insn)
3433 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3434 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3437 return insn;
3440 /* Return the previous insn. If it is a SEQUENCE, return the last insn
3441 of the sequence. */
3443 rtx_insn *
3444 previous_insn (rtx_insn *insn)
3446 if (insn)
3448 insn = PREV_INSN (insn);
3449 if (insn && NONJUMP_INSN_P (insn))
3450 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
3451 insn = seq->insn (seq->len () - 1);
3454 return insn;
3457 /* Return the next insn after INSN that is not a NOTE. This routine does not
3458 look inside SEQUENCEs. */
3460 rtx_insn *
3461 next_nonnote_insn (rtx_insn *insn)
3463 while (insn)
3465 insn = NEXT_INSN (insn);
3466 if (insn == 0 || !NOTE_P (insn))
3467 break;
3470 return insn;
3473 /* Return the next insn after INSN that is not a DEBUG_INSN. This
3474 routine does not look inside SEQUENCEs. */
3476 rtx_insn *
3477 next_nondebug_insn (rtx_insn *insn)
3479 while (insn)
3481 insn = NEXT_INSN (insn);
3482 if (insn == 0 || !DEBUG_INSN_P (insn))
3483 break;
3486 return insn;
3489 /* Return the previous insn before INSN that is not a NOTE. This routine does
3490 not look inside SEQUENCEs. */
3492 rtx_insn *
3493 prev_nonnote_insn (rtx_insn *insn)
3495 while (insn)
3497 insn = PREV_INSN (insn);
3498 if (insn == 0 || !NOTE_P (insn))
3499 break;
3502 return insn;
3505 /* Return the previous insn before INSN that is not a DEBUG_INSN.
3506 This routine does not look inside SEQUENCEs. */
3508 rtx_insn *
3509 prev_nondebug_insn (rtx_insn *insn)
3511 while (insn)
3513 insn = PREV_INSN (insn);
3514 if (insn == 0 || !DEBUG_INSN_P (insn))
3515 break;
3518 return insn;
3521 /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
3522 This routine does not look inside SEQUENCEs. */
3524 rtx_insn *
3525 next_nonnote_nondebug_insn (rtx_insn *insn)
3527 while (insn)
3529 insn = NEXT_INSN (insn);
3530 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3531 break;
3534 return insn;
3537 /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN,
3538 but stop the search before we enter another basic block. This
3539 routine does not look inside SEQUENCEs. */
3541 rtx_insn *
3542 next_nonnote_nondebug_insn_bb (rtx_insn *insn)
3544 while (insn)
3546 insn = NEXT_INSN (insn);
3547 if (insn == 0)
3548 break;
3549 if (DEBUG_INSN_P (insn))
3550 continue;
3551 if (!NOTE_P (insn))
3552 break;
3553 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3554 return NULL;
3557 return insn;
3560 /* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
3561 This routine does not look inside SEQUENCEs. */
3563 rtx_insn *
3564 prev_nonnote_nondebug_insn (rtx_insn *insn)
3566 while (insn)
3568 insn = PREV_INSN (insn);
3569 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3570 break;
3573 return insn;
3576 /* Return the previous insn before INSN that is not a NOTE nor
3577 DEBUG_INSN, but stop the search before we enter another basic
3578 block. This routine does not look inside SEQUENCEs. */
3580 rtx_insn *
3581 prev_nonnote_nondebug_insn_bb (rtx_insn *insn)
3583 while (insn)
3585 insn = PREV_INSN (insn);
3586 if (insn == 0)
3587 break;
3588 if (DEBUG_INSN_P (insn))
3589 continue;
3590 if (!NOTE_P (insn))
3591 break;
3592 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3593 return NULL;
3596 return insn;
3599 /* Return the next INSN, CALL_INSN, JUMP_INSN or DEBUG_INSN after INSN;
3600 or 0, if there is none. This routine does not look inside
3601 SEQUENCEs. */
3603 rtx_insn *
3604 next_real_insn (rtx_insn *insn)
3606 while (insn)
3608 insn = NEXT_INSN (insn);
3609 if (insn == 0 || INSN_P (insn))
3610 break;
3613 return insn;
3616 /* Return the last INSN, CALL_INSN, JUMP_INSN or DEBUG_INSN before INSN;
3617 or 0, if there is none. This routine does not look inside
3618 SEQUENCEs. */
3620 rtx_insn *
3621 prev_real_insn (rtx_insn *insn)
3623 while (insn)
3625 insn = PREV_INSN (insn);
3626 if (insn == 0 || INSN_P (insn))
3627 break;
3630 return insn;
3633 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3634 or 0, if there is none. This routine does not look inside
3635 SEQUENCEs. */
3637 rtx_insn *
3638 next_real_nondebug_insn (rtx uncast_insn)
3640 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3642 while (insn)
3644 insn = NEXT_INSN (insn);
3645 if (insn == 0 || NONDEBUG_INSN_P (insn))
3646 break;
3649 return insn;
3652 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3653 or 0, if there is none. This routine does not look inside
3654 SEQUENCEs. */
3656 rtx_insn *
3657 prev_real_nondebug_insn (rtx_insn *insn)
3659 while (insn)
3661 insn = PREV_INSN (insn);
3662 if (insn == 0 || NONDEBUG_INSN_P (insn))
3663 break;
3666 return insn;
3669 /* Return the last CALL_INSN in the current list, or 0 if there is none.
3670 This routine does not look inside SEQUENCEs. */
3672 rtx_call_insn *
3673 last_call_insn (void)
3675 rtx_insn *insn;
3677 for (insn = get_last_insn ();
3678 insn && !CALL_P (insn);
3679 insn = PREV_INSN (insn))
3682 return safe_as_a <rtx_call_insn *> (insn);
3685 bool
3686 active_insn_p (const rtx_insn *insn)
3688 return (CALL_P (insn) || JUMP_P (insn)
3689 || JUMP_TABLE_DATA_P (insn) /* FIXME */
3690 || (NONJUMP_INSN_P (insn)
3691 && (! reload_completed
3692 || (GET_CODE (PATTERN (insn)) != USE
3693 && GET_CODE (PATTERN (insn)) != CLOBBER))));
3696 /* Find the next insn after INSN that really does something. This routine
3697 does not look inside SEQUENCEs. After reload this also skips over
3698 standalone USE and CLOBBER insns. */
3700 rtx_insn *
3701 next_active_insn (rtx_insn *insn)
3703 while (insn)
3705 insn = NEXT_INSN (insn);
3706 if (insn == 0 || active_insn_p (insn))
3707 break;
3710 return insn;
3713 /* Find the last insn before INSN that really does something. This routine
3714 does not look inside SEQUENCEs. After reload this also skips over
3715 standalone USE and CLOBBER insns. */
3717 rtx_insn *
3718 prev_active_insn (rtx_insn *insn)
3720 while (insn)
3722 insn = PREV_INSN (insn);
3723 if (insn == 0 || active_insn_p (insn))
3724 break;
3727 return insn;
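/* Illustrative sketch (not part of the original source): the walkers
   above compose into simple scan loops, e.g.

     for (rtx_insn *p = get_insns (); p;
          p = next_nonnote_nondebug_insn (p))
       ;  // visit only insns that matter for code generation
   */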
3730 /* Return true if X contains an RTX_AUTOINC class rtx whose operand matches REG. */
3732 static bool
3733 find_auto_inc (const_rtx x, const_rtx reg)
3735 subrtx_iterator::array_type array;
3736 FOR_EACH_SUBRTX (iter, array, x, NONCONST)
3738 const_rtx x = *iter;
3739 if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC
3740 && rtx_equal_p (reg, XEXP (x, 0)))
3741 return true;
3743 return false;
3746 /* Increment the label uses for all labels present in rtx. */
3748 static void
3749 mark_label_nuses (rtx x)
3751 enum rtx_code code;
3752 int i, j;
3753 const char *fmt;
3755 code = GET_CODE (x);
3756 if (code == LABEL_REF && LABEL_P (label_ref_label (x)))
3757 LABEL_NUSES (label_ref_label (x))++;
3759 fmt = GET_RTX_FORMAT (code);
3760 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3762 if (fmt[i] == 'e')
3763 mark_label_nuses (XEXP (x, i));
3764 else if (fmt[i] == 'E')
3765 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3766 mark_label_nuses (XVECEXP (x, i, j));
3771 /* Try splitting insns that can be split for better scheduling.
3772 PAT is the pattern which might split.
3773 TRIAL is the insn providing PAT.
3774 LAST is nonzero if we should return the last insn of the sequence produced.
3776 If this routine succeeds in splitting, it returns the first or last
3777 replacement insn depending on the value of LAST. Otherwise, it
3778 returns TRIAL. If the insn to be returned can be split, it will be. */
3780 rtx_insn *
3781 try_split (rtx pat, rtx_insn *trial, int last)
3783 rtx_insn *before, *after;
3784 rtx note;
3785 rtx_insn *seq, *tem;
3786 profile_probability probability;
3787 rtx_insn *insn_last, *insn;
3788 int njumps = 0;
3789 rtx_insn *call_insn = NULL;
3791 if (any_condjump_p (trial)
3792 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3793 split_branch_probability
3794 = profile_probability::from_reg_br_prob_note (XINT (note, 0));
3795 else
3796 split_branch_probability = profile_probability::uninitialized ();
3798 probability = split_branch_probability;
3800 seq = split_insns (pat, trial);
3802 split_branch_probability = profile_probability::uninitialized ();
3804 if (!seq)
3805 return trial;
3807 int split_insn_count = 0;
3808 /* Avoid an infinite loop if any insn of the result matches
3809 the original pattern. */
3810 insn_last = seq;
3811 while (1)
3813 if (INSN_P (insn_last)
3814 && rtx_equal_p (PATTERN (insn_last), pat))
3815 return trial;
3816 split_insn_count++;
3817 if (!NEXT_INSN (insn_last))
3818 break;
3819 insn_last = NEXT_INSN (insn_last);
3822 /* We're not good at redistributing frame information if
3823 the split occurs before reload or if it results in more
3824 than one insn. */
3825 if (RTX_FRAME_RELATED_P (trial))
3827 if (!reload_completed || split_insn_count != 1)
3828 return trial;
3830 rtx_insn *new_insn = seq;
3831 rtx_insn *old_insn = trial;
3832 copy_frame_info_to_split_insn (old_insn, new_insn);
3835 /* We will be adding the new sequence to the function. The splitters
3836 may have introduced invalid RTL sharing, so unshare the sequence now. */
3837 unshare_all_rtl_in_chain (seq);
3839 /* Mark labels and copy flags. */
3840 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3842 if (JUMP_P (insn))
3844 if (JUMP_P (trial))
3845 CROSSING_JUMP_P (insn) = CROSSING_JUMP_P (trial);
3846 mark_jump_label (PATTERN (insn), insn, 0);
3847 njumps++;
3848 if (probability.initialized_p ()
3849 && any_condjump_p (insn)
3850 && !find_reg_note (insn, REG_BR_PROB, 0))
3852 /* We can preserve the REG_BR_PROB notes only if exactly
3853 one jump is created, otherwise the machine description
3854 is responsible for this step using
3855 split_branch_probability variable. */
3856 gcc_assert (njumps == 1);
3857 add_reg_br_prob_note (insn, probability);
3862 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3863 in SEQ and copy any additional information across. */
3864 if (CALL_P (trial))
3866 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3867 if (CALL_P (insn))
3869 gcc_assert (call_insn == NULL_RTX);
3870 call_insn = insn;
3872 /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the
3873 target may have explicitly specified. */
3874 rtx *p = &CALL_INSN_FUNCTION_USAGE (insn);
3875 while (*p)
3876 p = &XEXP (*p, 1);
3877 *p = CALL_INSN_FUNCTION_USAGE (trial);
3879 /* If the old call was a sibling call, the new one must
3880 be too. */
3881 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
3885 /* Copy notes, particularly those related to the CFG. */
3886 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3888 switch (REG_NOTE_KIND (note))
3890 case REG_EH_REGION:
3891 copy_reg_eh_region_note_backward (note, insn_last, NULL);
3892 break;
3894 case REG_NORETURN:
3895 case REG_SETJMP:
3896 case REG_TM:
3897 case REG_CALL_NOCF_CHECK:
3898 case REG_CALL_ARG_LOCATION:
3899 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3901 if (CALL_P (insn))
3902 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3904 break;
3906 case REG_NON_LOCAL_GOTO:
3907 case REG_LABEL_TARGET:
3908 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3910 if (JUMP_P (insn))
3911 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3913 break;
3915 case REG_INC:
3916 if (!AUTO_INC_DEC)
3917 break;
3919 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3921 rtx reg = XEXP (note, 0);
3922 if (!FIND_REG_INC_NOTE (insn, reg)
3923 && find_auto_inc (PATTERN (insn), reg))
3924 add_reg_note (insn, REG_INC, reg);
3926 break;
3928 case REG_ARGS_SIZE:
3929 fixup_args_size_notes (NULL, insn_last, get_args_size (note));
3930 break;
3932 case REG_CALL_DECL:
3933 case REG_UNTYPED_CALL:
3934 gcc_assert (call_insn != NULL_RTX);
3935 add_reg_note (call_insn, REG_NOTE_KIND (note), XEXP (note, 0));
3936 break;
3938 default:
3939 break;
3943 /* If there are LABELS inside the split insns increment the
3944 usage count so we don't delete the label. */
3945 if (INSN_P (trial))
3947 insn = insn_last;
3948 while (insn != NULL_RTX)
3950 /* JUMP_P insns have already been "marked" above. */
3951 if (NONJUMP_INSN_P (insn))
3952 mark_label_nuses (PATTERN (insn));
3954 insn = PREV_INSN (insn);
3958 before = PREV_INSN (trial);
3959 after = NEXT_INSN (trial);
3961 emit_insn_after_setloc (seq, trial, INSN_LOCATION (trial));
3963 delete_insn (trial);
3965 /* Recursively call try_split for each new insn created; by the
3966 time control returns here that insn will be fully split, so
3967 set LAST and continue from the insn after the one returned.
3968 We can't use next_active_insn here since AFTER may be a note.
3969 Ignore deleted insns, which can occur if not optimizing. */
3970 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3971 if (! tem->deleted () && INSN_P (tem))
3972 tem = try_split (PATTERN (tem), tem, 1);
3974 /* Return either the first or the last insn, depending on which was
3975 requested. */
3976 return last
3977 ? (after ? PREV_INSN (after) : get_last_insn ())
3978 : NEXT_INSN (before);
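/* Illustrative sketch (not part of the original source): a scheduler
   preparation pass can request a full in-place split with

     rtx_insn *last = try_split (PATTERN (insn), insn, 1);

   If LAST != INSN, the original insn has been deleted and replaced by
   a (recursively split) sequence whose final insn is LAST.  */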
3981 /* Make and return an INSN rtx, initializing all its slots.
3982 Store PATTERN in the pattern slots. */
3984 rtx_insn *
3985 make_insn_raw (rtx pattern)
3987 rtx_insn *insn;
3989 insn = as_a <rtx_insn *> (rtx_alloc (INSN));
3991 INSN_UID (insn) = cur_insn_uid++;
3992 PATTERN (insn) = pattern;
3993 INSN_CODE (insn) = -1;
3994 REG_NOTES (insn) = NULL;
3995 INSN_LOCATION (insn) = curr_insn_location ();
3996 BLOCK_FOR_INSN (insn) = NULL;
3998 #ifdef ENABLE_RTL_CHECKING
3999 if (insn
4000 && INSN_P (insn)
4001 && (returnjump_p (insn)
4002 || (GET_CODE (insn) == SET
4003 && SET_DEST (insn) == pc_rtx)))
4005 warning (0, "ICE: %<emit_insn%> used where %<emit_jump_insn%> needed:");
4006 debug_rtx (insn);
4008 #endif
4010 return insn;
4013 /* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn. */
4015 static rtx_insn *
4016 make_debug_insn_raw (rtx pattern)
4018 rtx_debug_insn *insn;
4020 insn = as_a <rtx_debug_insn *> (rtx_alloc (DEBUG_INSN));
4021 INSN_UID (insn) = cur_debug_insn_uid++;
4022 if (cur_debug_insn_uid > param_min_nondebug_insn_uid)
4023 INSN_UID (insn) = cur_insn_uid++;
4025 PATTERN (insn) = pattern;
4026 INSN_CODE (insn) = -1;
4027 REG_NOTES (insn) = NULL;
4028 INSN_LOCATION (insn) = curr_insn_location ();
4029 BLOCK_FOR_INSN (insn) = NULL;
4031 return insn;
4034 /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
4036 static rtx_insn *
4037 make_jump_insn_raw (rtx pattern)
4039 rtx_jump_insn *insn;
4041 insn = as_a <rtx_jump_insn *> (rtx_alloc (JUMP_INSN));
4042 INSN_UID (insn) = cur_insn_uid++;
4044 PATTERN (insn) = pattern;
4045 INSN_CODE (insn) = -1;
4046 REG_NOTES (insn) = NULL;
4047 JUMP_LABEL (insn) = NULL;
4048 INSN_LOCATION (insn) = curr_insn_location ();
4049 BLOCK_FOR_INSN (insn) = NULL;
4051 return insn;
4054 /* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
4056 static rtx_insn *
4057 make_call_insn_raw (rtx pattern)
4059 rtx_call_insn *insn;
4061 insn = as_a <rtx_call_insn *> (rtx_alloc (CALL_INSN));
4062 INSN_UID (insn) = cur_insn_uid++;
4064 PATTERN (insn) = pattern;
4065 INSN_CODE (insn) = -1;
4066 REG_NOTES (insn) = NULL;
4067 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
4068 INSN_LOCATION (insn) = curr_insn_location ();
4069 BLOCK_FOR_INSN (insn) = NULL;
4071 return insn;
4074 /* Like `make_insn_raw' but make a NOTE instead of an insn. */
4076 static rtx_note *
4077 make_note_raw (enum insn_note subtype)
4079 /* Some notes are never created this way at all. These notes are
4080 only created by patching out insns. */
4081 gcc_assert (subtype != NOTE_INSN_DELETED_LABEL
4082 && subtype != NOTE_INSN_DELETED_DEBUG_LABEL);
4084 rtx_note *note = as_a <rtx_note *> (rtx_alloc (NOTE));
4085 INSN_UID (note) = cur_insn_uid++;
4086 NOTE_KIND (note) = subtype;
4087 BLOCK_FOR_INSN (note) = NULL;
4088 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
4089 return note;
4092 /* Link INSN into the doubly-linked chain between PREV and NEXT.
4093 INSN may be any object that can appear in the chain: INSN_P and NOTE_P objects,
4094 but also BARRIERs and JUMP_TABLE_DATAs. PREV and NEXT may be NULL. */
4096 static inline void
4097 link_insn_into_chain (rtx_insn *insn, rtx_insn *prev, rtx_insn *next)
4099 SET_PREV_INSN (insn) = prev;
4100 SET_NEXT_INSN (insn) = next;
4101 if (prev != NULL)
4103 SET_NEXT_INSN (prev) = insn;
4104 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
4106 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
4107 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = insn;
4110 if (next != NULL)
4112 SET_PREV_INSN (next) = insn;
4113 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
4115 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
4116 SET_PREV_INSN (sequence->insn (0)) = insn;
4120 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
4122 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (insn));
4123 SET_PREV_INSN (sequence->insn (0)) = prev;
4124 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
4128 /* Add INSN to the end of the doubly-linked list.
4129 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
4131 void
4132 add_insn (rtx_insn *insn)
4134 rtx_insn *prev = get_last_insn ();
4135 link_insn_into_chain (insn, prev, NULL);
4136 if (get_insns () == NULL)
4137 set_first_insn (insn);
4138 set_last_insn (insn);
4141 /* Add INSN into the doubly-linked list after insn AFTER. */
4143 static void
4144 add_insn_after_nobb (rtx_insn *insn, rtx_insn *after)
4146 rtx_insn *next = NEXT_INSN (after);
4148 gcc_assert (!optimize || !after->deleted ());
4150 link_insn_into_chain (insn, after, next);
4152 if (next == NULL)
4154 struct sequence_stack *seq;
4156 for (seq = get_current_sequence (); seq; seq = seq->next)
4157 if (after == seq->last)
4159 seq->last = insn;
4160 break;
4165 /* Add INSN into the doubly-linked list before insn BEFORE. */
4167 static void
4168 add_insn_before_nobb (rtx_insn *insn, rtx_insn *before)
4170 rtx_insn *prev = PREV_INSN (before);
4172 gcc_assert (!optimize || !before->deleted ());
4174 link_insn_into_chain (insn, prev, before);
4176 if (prev == NULL)
4178 struct sequence_stack *seq;
4180 for (seq = get_current_sequence (); seq; seq = seq->next)
4181 if (before == seq->first)
4183 seq->first = insn;
4184 break;
4187 gcc_assert (seq);
4191 /* Like add_insn_after_nobb, but try to set BLOCK_FOR_INSN.
4192 If BB is NULL, an attempt is made to infer the bb from AFTER.
4194 This and the next function should be the only functions called
4195 to insert an insn once delay slots have been filled since only
4196 they know how to update a SEQUENCE. */
4198 void
4199 add_insn_after (rtx_insn *insn, rtx_insn *after, basic_block bb)
4201 add_insn_after_nobb (insn, after);
4202 if (!BARRIER_P (after)
4203 && !BARRIER_P (insn)
4204 && (bb = BLOCK_FOR_INSN (after)))
4206 set_block_for_insn (insn, bb);
4207 if (INSN_P (insn))
4208 df_insn_rescan (insn);
4209 /* Should not happen as first in the BB is always
4210 either NOTE or LABEL. */
4211 if (BB_END (bb) == after
4212 /* Avoid clobbering of structure when creating new BB. */
4213 && !BARRIER_P (insn)
4214 && !NOTE_INSN_BASIC_BLOCK_P (insn))
4215 BB_END (bb) = insn;
4219 /* Like add_insn_before_nobb, but try to set BLOCK_FOR_INSN.
4220 If BB is NULL, an attempt is made to infer the bb from BEFORE.
4222 This and the previous function should be the only functions called
4223 to insert an insn once delay slots have been filled since only
4224 they know how to update a SEQUENCE. */
4226 void
4227 add_insn_before (rtx_insn *insn, rtx_insn *before, basic_block bb)
4229 add_insn_before_nobb (insn, before);
4231 if (!bb
4232 && !BARRIER_P (before)
4233 && !BARRIER_P (insn))
4234 bb = BLOCK_FOR_INSN (before);
4236 if (bb)
4238 set_block_for_insn (insn, bb);
4239 if (INSN_P (insn))
4240 df_insn_rescan (insn);
4241 /* Should not happen as first in the BB is always either NOTE or
4242 LABEL. */
4243 gcc_assert (BB_HEAD (bb) != insn
4244 /* Avoid clobbering of structure when creating new BB. */
4245 || BARRIER_P (insn)
4246 || NOTE_INSN_BASIC_BLOCK_P (insn));
4250 /* Replace INSN with a deleted instruction note. */
4252 void
4253 set_insn_deleted (rtx_insn *insn)
4255 if (INSN_P (insn))
4256 df_insn_delete (insn);
4257 PUT_CODE (insn, NOTE);
4258 NOTE_KIND (insn) = NOTE_INSN_DELETED;
4262 /* Unlink INSN from the insn chain.
4264 This function knows how to handle sequences.
4266 This function does not invalidate data flow information associated with
4267 INSN (i.e. does not call df_insn_delete). That makes this function
4268 usable only for disconnecting an insn from the chain so that it can
4269 be re-emitted elsewhere later.
4271 To later insert INSN elsewhere in the insn chain via add_insn and
4272 similar functions, PREV_INSN and NEXT_INSN must be nullified by
4273 the caller. Nullifying them here breaks many insn chain walks.
4275 To really delete an insn and related DF information, use delete_insn. */
4277 void
4278 remove_insn (rtx_insn *insn)
4280 rtx_insn *next = NEXT_INSN (insn);
4281 rtx_insn *prev = PREV_INSN (insn);
4282 basic_block bb;
4284 if (prev)
4286 SET_NEXT_INSN (prev) = next;
4287 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
4289 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
4290 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
4293 else
4295 struct sequence_stack *seq;
4297 for (seq = get_current_sequence (); seq; seq = seq->next)
4298 if (insn == seq->first)
4300 seq->first = next;
4301 break;
4304 gcc_assert (seq);
4307 if (next)
4309 SET_PREV_INSN (next) = prev;
4310 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
4312 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
4313 SET_PREV_INSN (sequence->insn (0)) = prev;
4316 else
4318 struct sequence_stack *seq;
4320 for (seq = get_current_sequence (); seq; seq = seq->next)
4321 if (insn == seq->last)
4323 seq->last = prev;
4324 break;
4327 gcc_assert (seq);
4330 /* Fix up basic block boundaries, if necessary. */
4331 if (!BARRIER_P (insn)
4332 && (bb = BLOCK_FOR_INSN (insn)))
4334 if (BB_HEAD (bb) == insn)
4336 /* Never ever delete the basic block note without deleting whole
4337 basic block. */
4338 gcc_assert (!NOTE_P (insn));
4339 BB_HEAD (bb) = next;
4341 if (BB_END (bb) == insn)
4342 BB_END (bb) = prev;
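/* Illustrative only (not in the original file): a minimal sketch of the
   re-emit contract documented before remove_insn.  OTHER names a
   hypothetical insertion point.  */

static void ATTRIBUTE_UNUSED
move_insn_after_sketch (rtx_insn *insn, rtx_insn *other)
{
  remove_insn (insn);
  /* remove_insn leaves PREV_INSN/NEXT_INSN intact; the caller must
     nullify them before re-inserting INSN.  */
  SET_PREV_INSN (insn) = NULL;
  SET_NEXT_INSN (insn) = NULL;
  add_insn_after (insn, other, NULL);
}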
4346 /* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
4348 void
4349 add_function_usage_to (rtx call_insn, rtx call_fusage)
4351 gcc_assert (call_insn && CALL_P (call_insn));
4353 /* Put the register usage information on the CALL. If there is already
4354 some usage information, put ours at the end. */
4355 if (CALL_INSN_FUNCTION_USAGE (call_insn))
4357 rtx link;
4359 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
4360 link = XEXP (link, 1))
4363 XEXP (link, 1) = call_fusage;
4365 else
4366 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
4369 /* Delete all insns made since FROM.
4370 FROM becomes the new last instruction. */
4372 void
4373 delete_insns_since (rtx_insn *from)
4375 if (from == 0)
4376 set_first_insn (0);
4377 else
4378 SET_NEXT_INSN (from) = 0;
4379 set_last_insn (from);
4382 /* This function is deprecated; please use sequences instead.
4384 Move a consecutive bunch of insns to a different place in the chain.
4385 The insns to be moved are those between FROM and TO.
4386 They are moved to a new position after the insn AFTER.
4387 AFTER must not be FROM or TO or any insn in between.
4389 This function does not know about SEQUENCEs and hence should not be
4390 called after delay-slot filling has been done. */
4392 void
4393 reorder_insns_nobb (rtx_insn *from, rtx_insn *to, rtx_insn *after)
4395 if (flag_checking)
4397 for (rtx_insn *x = from; x != to; x = NEXT_INSN (x))
4398 gcc_assert (after != x);
4399 gcc_assert (after != to);
4402 /* Splice this bunch out of where it is now. */
4403 if (PREV_INSN (from))
4404 SET_NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
4405 if (NEXT_INSN (to))
4406 SET_PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
4407 if (get_last_insn () == to)
4408 set_last_insn (PREV_INSN (from));
4409 if (get_insns () == from)
4410 set_first_insn (NEXT_INSN (to));
4412 /* Make the new neighbors point to it and it to them. */
4413 if (NEXT_INSN (after))
4414 SET_PREV_INSN (NEXT_INSN (after)) = to;
4416 SET_NEXT_INSN (to) = NEXT_INSN (after);
4417 SET_PREV_INSN (from) = after;
4418 SET_NEXT_INSN (after) = from;
4419 if (after == get_last_insn ())
4420 set_last_insn (to);
4423 /* Same as function above, but take care to update BB boundaries. */
4424 void
4425 reorder_insns (rtx_insn *from, rtx_insn *to, rtx_insn *after)
4427 rtx_insn *prev = PREV_INSN (from);
4428 basic_block bb, bb2;
4430 reorder_insns_nobb (from, to, after);
4432 if (!BARRIER_P (after)
4433 && (bb = BLOCK_FOR_INSN (after)))
4435 rtx_insn *x;
4436 df_set_bb_dirty (bb);
4438 if (!BARRIER_P (from)
4439 && (bb2 = BLOCK_FOR_INSN (from)))
4441 if (BB_END (bb2) == to)
4442 BB_END (bb2) = prev;
4443 df_set_bb_dirty (bb2);
4446 if (BB_END (bb) == after)
4447 BB_END (bb) = to;
4449 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
4450 if (!BARRIER_P (x))
4451 df_insn_change_bb (x, bb);
4456 /* Emit insn(s) of given code and pattern
4457 at a specified place within the doubly-linked list.
4459 All of the emit_foo global entry points accept an object
4460 X which is either an insn list or a PATTERN of a single
4461 instruction.
4463 There are thus a few canonical ways to generate code and
4464 emit it at a specific place in the instruction stream. For
4465 example, consider the instruction named SPOT and the fact that
4466 we would like to emit some instructions before SPOT. We might
4467 do it like this:
4469 start_sequence ();
4470 ... emit the new instructions ...
4471 insns_head = get_insns ();
4472 end_sequence ();
4474 emit_insn_before (insns_head, SPOT);
4476 It used to be common to generate SEQUENCE rtl instead, but that
4477 is a relic of the past which no longer occurs. The reason is that
4478 SEQUENCE rtl results in badly fragmented RTL memory, since the SEQUENCE
4479 generated would almost certainly die right after it was created. */
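/* Illustrative only (not in the original file): a compilable form of the
   idiom sketched in the comment above.  DEST, SRC and SPOT are
   hypothetical; here the "new instructions" are a single move.  */

static void ATTRIBUTE_UNUSED
emit_move_before_spot_sketch (rtx dest, rtx src, rtx_insn *spot)
{
  start_sequence ();
  /* ... emit the new instructions ...  */
  emit_insn (gen_rtx_SET (dest, src));
  rtx_insn *insns_head = get_insns ();
  end_sequence ();

  emit_insn_before (insns_head, spot);
}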
4481 static rtx_insn *
4482 emit_pattern_before_noloc (rtx x, rtx_insn *before, rtx_insn *last,
4483 basic_block bb,
4484 rtx_insn *(*make_raw) (rtx))
4486 rtx_insn *insn;
4488 gcc_assert (before);
4490 if (x == NULL_RTX)
4491 return last;
4493 switch (GET_CODE (x))
4495 case DEBUG_INSN:
4496 case INSN:
4497 case JUMP_INSN:
4498 case CALL_INSN:
4499 case CODE_LABEL:
4500 case BARRIER:
4501 case NOTE:
4502 insn = as_a <rtx_insn *> (x);
4503 while (insn)
4505 rtx_insn *next = NEXT_INSN (insn);
4506 add_insn_before (insn, before, bb);
4507 last = insn;
4508 insn = next;
4510 break;
4512 #ifdef ENABLE_RTL_CHECKING
4513 case SEQUENCE:
4514 gcc_unreachable ();
4515 break;
4516 #endif
4518 default:
4519 last = (*make_raw) (x);
4520 add_insn_before (last, before, bb);
4521 break;
4524 return last;
4527 /* Make X be output before the instruction BEFORE. */
4529 rtx_insn *
4530 emit_insn_before_noloc (rtx x, rtx_insn *before, basic_block bb)
4532 return emit_pattern_before_noloc (x, before, before, bb, make_insn_raw);
4535 /* Make an instruction with body X and code JUMP_INSN
4536 and output it before the instruction BEFORE. */
4538 rtx_jump_insn *
4539 emit_jump_insn_before_noloc (rtx x, rtx_insn *before)
4541 return as_a <rtx_jump_insn *> (
4542 emit_pattern_before_noloc (x, before, NULL, NULL,
4543 make_jump_insn_raw));
4546 /* Make an instruction with body X and code CALL_INSN
4547 and output it before the instruction BEFORE. */
4549 rtx_insn *
4550 emit_call_insn_before_noloc (rtx x, rtx_insn *before)
4552 return emit_pattern_before_noloc (x, before, NULL, NULL,
4553 make_call_insn_raw);
4556 /* Make an instruction with body X and code DEBUG_INSN
4557 and output it before the instruction BEFORE. */
4559 rtx_insn *
4560 emit_debug_insn_before_noloc (rtx x, rtx_insn *before)
4562 return emit_pattern_before_noloc (x, before, NULL, NULL,
4563 make_debug_insn_raw);
4566 /* Make an insn of code BARRIER
4567 and output it before the insn BEFORE. */
4569 rtx_barrier *
4570 emit_barrier_before (rtx_insn *before)
4572 rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
4574 INSN_UID (insn) = cur_insn_uid++;
4576 add_insn_before (insn, before, NULL);
4577 return insn;
4580 /* Emit the label LABEL before the insn BEFORE. */
4582 rtx_code_label *
4583 emit_label_before (rtx_code_label *label, rtx_insn *before)
4585 gcc_checking_assert (INSN_UID (label) == 0);
4586 INSN_UID (label) = cur_insn_uid++;
4587 add_insn_before (label, before, NULL);
4588 return label;
4591 /* Helper for emit_insn_after, handles lists of instructions
4592 efficiently. */
4594 static rtx_insn *
4595 emit_insn_after_1 (rtx_insn *first, rtx_insn *after, basic_block bb)
4597 rtx_insn *last;
4598 rtx_insn *after_after;
4599 if (!bb && !BARRIER_P (after))
4600 bb = BLOCK_FOR_INSN (after);
4602 if (bb)
4604 df_set_bb_dirty (bb);
4605 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4606 if (!BARRIER_P (last))
4608 set_block_for_insn (last, bb);
4609 df_insn_rescan (last);
4611 if (!BARRIER_P (last))
4613 set_block_for_insn (last, bb);
4614 df_insn_rescan (last);
4616 if (BB_END (bb) == after)
4617 BB_END (bb) = last;
4619 else
4620 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4621 continue;
4623 after_after = NEXT_INSN (after);
4625 SET_NEXT_INSN (after) = first;
4626 SET_PREV_INSN (first) = after;
4627 SET_NEXT_INSN (last) = after_after;
4628 if (after_after)
4629 SET_PREV_INSN (after_after) = last;
4631 if (after == get_last_insn ())
4632 set_last_insn (last);
4634 return last;
4637 static rtx_insn *
4638 emit_pattern_after_noloc (rtx x, rtx_insn *after, basic_block bb,
4639 rtx_insn *(*make_raw)(rtx))
4641 rtx_insn *last = after;
4643 gcc_assert (after);
4645 if (x == NULL_RTX)
4646 return last;
4648 switch (GET_CODE (x))
4650 case DEBUG_INSN:
4651 case INSN:
4652 case JUMP_INSN:
4653 case CALL_INSN:
4654 case CODE_LABEL:
4655 case BARRIER:
4656 case NOTE:
4657 last = emit_insn_after_1 (as_a <rtx_insn *> (x), after, bb);
4658 break;
4660 #ifdef ENABLE_RTL_CHECKING
4661 case SEQUENCE:
4662 gcc_unreachable ();
4663 break;
4664 #endif
4666 default:
4667 last = (*make_raw) (x);
4668 add_insn_after (last, after, bb);
4669 break;
4672 return last;
4675 /* Make X be output after the insn AFTER and set the BB of insn. If
4676 BB is NULL, an attempt is made to infer the BB from AFTER. */
4678 rtx_insn *
4679 emit_insn_after_noloc (rtx x, rtx_insn *after, basic_block bb)
4681 return emit_pattern_after_noloc (x, after, bb, make_insn_raw);
4685 /* Make an insn of code JUMP_INSN with body X
4686 and output it after the insn AFTER. */
4688 rtx_jump_insn *
4689 emit_jump_insn_after_noloc (rtx x, rtx_insn *after)
4691 return as_a <rtx_jump_insn *> (
4692 emit_pattern_after_noloc (x, after, NULL, make_jump_insn_raw));
4695 /* Make an instruction with body X and code CALL_INSN
4696 and output it after the instruction AFTER. */
4698 rtx_insn *
4699 emit_call_insn_after_noloc (rtx x, rtx_insn *after)
4701 return emit_pattern_after_noloc (x, after, NULL, make_call_insn_raw);
4704 /* Make an instruction with body X and code DEBUG_INSN
4705 and output it after the instruction AFTER. */
4707 rtx_insn *
4708 emit_debug_insn_after_noloc (rtx x, rtx_insn *after)
4710 return emit_pattern_after_noloc (x, after, NULL, make_debug_insn_raw);
4713 /* Make an insn of code BARRIER
4714 and output it after the insn AFTER. */
4716 rtx_barrier *
4717 emit_barrier_after (rtx_insn *after)
4719 rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
4721 INSN_UID (insn) = cur_insn_uid++;
4723 add_insn_after (insn, after, NULL);
4724 return insn;
4727 /* Emit the label LABEL after the insn AFTER. */
4729 rtx_insn *
4730 emit_label_after (rtx_insn *label, rtx_insn *after)
4732 gcc_checking_assert (INSN_UID (label) == 0);
4733 INSN_UID (label) = cur_insn_uid++;
4734 add_insn_after (label, after, NULL);
4735 return label;
4738 /* Notes require a bit of special handling: Some notes need to have their
4739 BLOCK_FOR_INSN set, others should never have it set, and some should
4740 have it set or clear depending on the context. */
4742 /* Return true iff a note of kind SUBTYPE should be emitted with routines
4743 that never set BLOCK_FOR_INSN on NOTE. ON_BB_BOUNDARY_P is true if the
4744 caller is asked to emit a note before BB_HEAD, or after BB_END. */
4746 static bool
4747 note_outside_basic_block_p (enum insn_note subtype, bool on_bb_boundary_p)
4749 switch (subtype)
4751 /* NOTE_INSN_SWITCH_TEXT_SECTIONS only appears between basic blocks. */
4752 case NOTE_INSN_SWITCH_TEXT_SECTIONS:
4753 return true;
4755 /* Notes for var tracking and EH region markers can appear between or
4756 inside basic blocks. If the caller is emitting on the basic block
4757 boundary, do not set BLOCK_FOR_INSN on the new note. */
4758 case NOTE_INSN_VAR_LOCATION:
4759 case NOTE_INSN_EH_REGION_BEG:
4760 case NOTE_INSN_EH_REGION_END:
4761 return on_bb_boundary_p;
4763 /* Otherwise, BLOCK_FOR_INSN must be set. */
4764 default:
4765 return false;
4769 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
4771 rtx_note *
4772 emit_note_after (enum insn_note subtype, rtx_insn *after)
4774 rtx_note *note = make_note_raw (subtype);
4775 basic_block bb = BARRIER_P (after) ? NULL : BLOCK_FOR_INSN (after);
4776 bool on_bb_boundary_p = (bb != NULL && BB_END (bb) == after);
4778 if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4779 add_insn_after_nobb (note, after);
4780 else
4781 add_insn_after (note, after, bb);
4782 return note;
4785 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4787 rtx_note *
4788 emit_note_before (enum insn_note subtype, rtx_insn *before)
4790 rtx_note *note = make_note_raw (subtype);
4791 basic_block bb = BARRIER_P (before) ? NULL : BLOCK_FOR_INSN (before);
4792 bool on_bb_boundary_p = (bb != NULL && BB_HEAD (bb) == before);
4794 if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4795 add_insn_before_nobb (note, before);
4796 else
4797 add_insn_before (note, before, bb);
4798 return note;
4801 /* Insert PATTERN after AFTER, setting its INSN_LOCATION to LOC.
4802 MAKE_RAW indicates how to turn PATTERN into a real insn. */
4804 static rtx_insn *
4805 emit_pattern_after_setloc (rtx pattern, rtx_insn *after, location_t loc,
4806 rtx_insn *(*make_raw) (rtx))
4808 rtx_insn *last = emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4810 if (pattern == NULL_RTX || !loc)
4811 return last;
4813 after = NEXT_INSN (after);
4814 while (1)
4816 if (active_insn_p (after)
4817 && !JUMP_TABLE_DATA_P (after) /* FIXME */
4818 && !INSN_LOCATION (after))
4819 INSN_LOCATION (after) = loc;
4820 if (after == last)
4821 break;
4822 after = NEXT_INSN (after);
4824 return last;
4827 /* Insert PATTERN after AFTER. MAKE_RAW indicates how to turn PATTERN
4828 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert after
4829 any DEBUG_INSNs. */
4831 static rtx_insn *
4832 emit_pattern_after (rtx pattern, rtx_insn *after, bool skip_debug_insns,
4833 rtx_insn *(*make_raw) (rtx))
4835 rtx_insn *prev = after;
4837 if (skip_debug_insns)
4838 while (DEBUG_INSN_P (prev))
4839 prev = PREV_INSN (prev);
4841 if (INSN_P (prev))
4842 return emit_pattern_after_setloc (pattern, after, INSN_LOCATION (prev),
4843 make_raw);
4844 else
4845 return emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4848 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4849 rtx_insn *
4850 emit_insn_after_setloc (rtx pattern, rtx_insn *after, location_t loc)
4852 return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw);
4855 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4856 rtx_insn *
4857 emit_insn_after (rtx pattern, rtx_insn *after)
4859 return emit_pattern_after (pattern, after, true, make_insn_raw);
4862 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4863 rtx_jump_insn *
4864 emit_jump_insn_after_setloc (rtx pattern, rtx_insn *after, location_t loc)
4866 return as_a <rtx_jump_insn *> (
4867 emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw));
4870 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4871 rtx_jump_insn *
4872 emit_jump_insn_after (rtx pattern, rtx_insn *after)
4874 return as_a <rtx_jump_insn *> (
4875 emit_pattern_after (pattern, after, true, make_jump_insn_raw));
4878 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4879 rtx_insn *
4880 emit_call_insn_after_setloc (rtx pattern, rtx_insn *after, location_t loc)
4882 return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw);
4885 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4886 rtx_insn *
4887 emit_call_insn_after (rtx pattern, rtx_insn *after)
4889 return emit_pattern_after (pattern, after, true, make_call_insn_raw);
4892 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4893 rtx_insn *
4894 emit_debug_insn_after_setloc (rtx pattern, rtx_insn *after, location_t loc)
4896 return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw);
4899 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4900 rtx_insn *
4901 emit_debug_insn_after (rtx pattern, rtx_insn *after)
4903 return emit_pattern_after (pattern, after, false, make_debug_insn_raw);
4906 /* Insert PATTERN before BEFORE, setting its INSN_LOCATION to LOC.
4907 MAKE_RAW indicates how to turn PATTERN into a real insn. INSNP
4908 indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN,
4909 CALL_INSN, etc. */
4911 static rtx_insn *
4912 emit_pattern_before_setloc (rtx pattern, rtx_insn *before, location_t loc,
4913 bool insnp, rtx_insn *(*make_raw) (rtx))
4915 rtx_insn *first = PREV_INSN (before);
4916 rtx_insn *last = emit_pattern_before_noloc (pattern, before,
4917 insnp ? before : NULL,
4918 NULL, make_raw);
4920 if (pattern == NULL_RTX || !loc)
4921 return last;
4923 if (!first)
4924 first = get_insns ();
4925 else
4926 first = NEXT_INSN (first);
4927 while (1)
4929 if (active_insn_p (first)
4930 && !JUMP_TABLE_DATA_P (first) /* FIXME */
4931 && !INSN_LOCATION (first))
4932 INSN_LOCATION (first) = loc;
4933 if (first == last)
4934 break;
4935 first = NEXT_INSN (first);
4937 return last;
4940 /* Insert PATTERN before BEFORE. MAKE_RAW indicates how to turn PATTERN
4941 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert
4942 before any DEBUG_INSNs. INSNP indicates if PATTERN is meant for an
4943 INSN as opposed to a JUMP_INSN, CALL_INSN, etc. */
4945 static rtx_insn *
4946 emit_pattern_before (rtx pattern, rtx_insn *before, bool skip_debug_insns,
4947 bool insnp, rtx_insn *(*make_raw) (rtx))
4949 rtx_insn *next = before;
4951 if (skip_debug_insns)
4952 while (DEBUG_INSN_P (next))
4953 next = PREV_INSN (next);
4955 if (INSN_P (next))
4956 return emit_pattern_before_setloc (pattern, before, INSN_LOCATION (next),
4957 insnp, make_raw);
4958 else
4959 return emit_pattern_before_noloc (pattern, before,
4960 insnp ? before : NULL,
4961 NULL, make_raw);
4964 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4965 rtx_insn *
4966 emit_insn_before_setloc (rtx pattern, rtx_insn *before, location_t loc)
4968 return emit_pattern_before_setloc (pattern, before, loc, true,
4969 make_insn_raw);
4972 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
4973 rtx_insn *
4974 emit_insn_before (rtx pattern, rtx_insn *before)
4976 return emit_pattern_before (pattern, before, true, true, make_insn_raw);
4979 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4980 rtx_jump_insn *
4981 emit_jump_insn_before_setloc (rtx pattern, rtx_insn *before, location_t loc)
4983 return as_a <rtx_jump_insn *> (
4984 emit_pattern_before_setloc (pattern, before, loc, false,
4985 make_jump_insn_raw));
4988 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
4989 rtx_jump_insn *
4990 emit_jump_insn_before (rtx pattern, rtx_insn *before)
4992 return as_a <rtx_jump_insn *> (
4993 emit_pattern_before (pattern, before, true, false,
4994 make_jump_insn_raw));
4997 /* Like emit_call_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4998 rtx_insn *
4999 emit_call_insn_before_setloc (rtx pattern, rtx_insn *before, location_t loc)
5001 return emit_pattern_before_setloc (pattern, before, loc, false,
5002 make_call_insn_raw);
5005 /* Like emit_call_insn_before_noloc,
5006 but set insn_location according to BEFORE. */
5007 rtx_insn *
5008 emit_call_insn_before (rtx pattern, rtx_insn *before)
5010 return emit_pattern_before (pattern, before, true, false,
5011 make_call_insn_raw);
5014 /* Like emit_debug_insn_before_noloc, but set INSN_LOCATION according to LOC. */
5015 rtx_insn *
5016 emit_debug_insn_before_setloc (rtx pattern, rtx_insn *before, location_t loc)
5018 return emit_pattern_before_setloc (pattern, before, loc, false,
5019 make_debug_insn_raw);
5022 /* Like emit_debug_insn_before_noloc,
5023 but set insn_location according to BEFORE. */
5024 rtx_insn *
5025 emit_debug_insn_before (rtx pattern, rtx_insn *before)
5027 return emit_pattern_before (pattern, before, false, false,
5028 make_debug_insn_raw);
5031 /* Take X and emit it at the end of the doubly-linked
5032 INSN list.
5034 Returns the last insn emitted. */
5036 rtx_insn *
5037 emit_insn (rtx x)
5039 rtx_insn *last = get_last_insn ();
5040 rtx_insn *insn;
5042 if (x == NULL_RTX)
5043 return last;
5045 switch (GET_CODE (x))
5047 case DEBUG_INSN:
5048 case INSN:
5049 case JUMP_INSN:
5050 case CALL_INSN:
5051 case CODE_LABEL:
5052 case BARRIER:
5053 case NOTE:
5054 insn = as_a <rtx_insn *> (x);
5055 while (insn)
5057 rtx_insn *next = NEXT_INSN (insn);
5058 add_insn (insn);
5059 last = insn;
5060 insn = next;
5062 break;
5064 #ifdef ENABLE_RTL_CHECKING
5065 case JUMP_TABLE_DATA:
5066 case SEQUENCE:
5067 gcc_unreachable ();
5068 break;
5069 #endif
5071 default:
5072 last = make_insn_raw (x);
5073 add_insn (last);
5074 break;
5077 return last;
5080 /* Make an insn of code DEBUG_INSN with pattern X
5081 and add it to the end of the doubly-linked list. */
5083 rtx_insn *
5084 emit_debug_insn (rtx x)
5086 rtx_insn *last = get_last_insn ();
5087 rtx_insn *insn;
5089 if (x == NULL_RTX)
5090 return last;
5092 switch (GET_CODE (x))
5094 case DEBUG_INSN:
5095 case INSN:
5096 case JUMP_INSN:
5097 case CALL_INSN:
5098 case CODE_LABEL:
5099 case BARRIER:
5100 case NOTE:
5101 insn = as_a <rtx_insn *> (x);
5102 while (insn)
5104 rtx_insn *next = NEXT_INSN (insn);
5105 add_insn (insn);
5106 last = insn;
5107 insn = next;
5109 break;
5111 #ifdef ENABLE_RTL_CHECKING
5112 case JUMP_TABLE_DATA:
5113 case SEQUENCE:
5114 gcc_unreachable ();
5115 break;
5116 #endif
5118 default:
5119 last = make_debug_insn_raw (x);
5120 add_insn (last);
5121 break;
5124 return last;
5127 /* Make an insn of code JUMP_INSN with pattern X
5128 and add it to the end of the doubly-linked list. */
5130 rtx_insn *
5131 emit_jump_insn (rtx x)
5133 rtx_insn *last = NULL;
5134 rtx_insn *insn;
5136 switch (GET_CODE (x))
5138 case DEBUG_INSN:
5139 case INSN:
5140 case JUMP_INSN:
5141 case CALL_INSN:
5142 case CODE_LABEL:
5143 case BARRIER:
5144 case NOTE:
5145 insn = as_a <rtx_insn *> (x);
5146 while (insn)
5148 rtx_insn *next = NEXT_INSN (insn);
5149 add_insn (insn);
5150 last = insn;
5151 insn = next;
5153 break;
5155 #ifdef ENABLE_RTL_CHECKING
5156 case JUMP_TABLE_DATA:
5157 case SEQUENCE:
5158 gcc_unreachable ();
5159 break;
5160 #endif
5162 default:
5163 last = make_jump_insn_raw (x);
5164 add_insn (last);
5165 break;
5168 return last;
5171 /* Make an insn of code JUMP_INSN with pattern X,
5172 add a REG_BR_PROB note that indicates very likely probability,
5173 and add it to the end of the doubly-linked list. */
5175 rtx_insn *
5176 emit_likely_jump_insn (rtx x)
5178 rtx_insn *jump = emit_jump_insn (x);
5179 add_reg_br_prob_note (jump, profile_probability::very_likely ());
5180 return jump;
5183 /* Make an insn of code JUMP_INSN with pattern X,
5184 add a REG_BR_PROB note that indicates very unlikely probability,
5185 and add it to the end of the doubly-linked list. */
5187 rtx_insn *
5188 emit_unlikely_jump_insn (rtx x)
5190 rtx_insn *jump = emit_jump_insn (x);
5191 add_reg_br_prob_note (jump, profile_probability::very_unlikely ());
5192 return jump;
5195 /* Make an insn of code CALL_INSN with pattern X
5196 and add it to the end of the doubly-linked list. */
5198 rtx_insn *
5199 emit_call_insn (rtx x)
5201 rtx_insn *insn;
5203 switch (GET_CODE (x))
5205 case DEBUG_INSN:
5206 case INSN:
5207 case JUMP_INSN:
5208 case CALL_INSN:
5209 case CODE_LABEL:
5210 case BARRIER:
5211 case NOTE:
5212 insn = emit_insn (x);
5213 break;
5215 #ifdef ENABLE_RTL_CHECKING
5216 case SEQUENCE:
5217 case JUMP_TABLE_DATA:
5218 gcc_unreachable ();
5219 break;
5220 #endif
5222 default:
5223 insn = make_call_insn_raw (x);
5224 add_insn (insn);
5225 break;
5228 return insn;
5231 /* Add the label LABEL to the end of the doubly-linked list. */
5233 rtx_code_label *
5234 emit_label (rtx uncast_label)
5236 rtx_code_label *label = as_a <rtx_code_label *> (uncast_label);
5238 gcc_checking_assert (INSN_UID (label) == 0);
5239 INSN_UID (label) = cur_insn_uid++;
5240 add_insn (label);
5241 return label;
5244 /* Make an insn of code JUMP_TABLE_DATA
5245 and add it to the end of the doubly-linked list. */
5247 rtx_jump_table_data *
5248 emit_jump_table_data (rtx table)
5250 rtx_jump_table_data *jump_table_data =
5251 as_a <rtx_jump_table_data *> (rtx_alloc (JUMP_TABLE_DATA));
5252 INSN_UID (jump_table_data) = cur_insn_uid++;
5253 PATTERN (jump_table_data) = table;
5254 BLOCK_FOR_INSN (jump_table_data) = NULL;
5255 add_insn (jump_table_data);
5256 return jump_table_data;
5259 /* Make an insn of code BARRIER
5260 and add it to the end of the doubly-linked list. */
5262 rtx_barrier *
5263 emit_barrier (void)
5265 rtx_barrier *barrier = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
5266 INSN_UID (barrier) = cur_insn_uid++;
5267 add_insn (barrier);
5268 return barrier;
5271 /* Emit a copy of note ORIG. */
5273 rtx_note *
5274 emit_note_copy (rtx_note *orig)
5276 enum insn_note kind = (enum insn_note) NOTE_KIND (orig);
5277 rtx_note *note = make_note_raw (kind);
5278 NOTE_DATA (note) = NOTE_DATA (orig);
5279 add_insn (note);
5280 return note;
5283 /* Make a note of kind KIND
5284 and add it to the end of the doubly-linked list. */
5286 rtx_note *
5287 emit_note (enum insn_note kind)
5289 rtx_note *note = make_note_raw (kind);
5290 add_insn (note);
5291 return note;
5294 /* Emit a clobber of lvalue X. */
5296 rtx_insn *
5297 emit_clobber (rtx x)
5299 /* CONCATs should not appear in the insn stream. */
5300 if (GET_CODE (x) == CONCAT)
5302 emit_clobber (XEXP (x, 0));
5303 return emit_clobber (XEXP (x, 1));
5305 return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
5308 /* Return a sequence of insns to clobber lvalue X. */
5310 rtx_insn *
5311 gen_clobber (rtx x)
5313 rtx_insn *seq;
5315 start_sequence ();
5316 emit_clobber (x);
5317 seq = get_insns ();
5318 end_sequence ();
5319 return seq;
5322 /* Emit a use of rvalue X. */
5324 rtx_insn *
5325 emit_use (rtx x)
5327 /* CONCATs should not appear in the insn stream. */
5328 if (GET_CODE (x) == CONCAT)
5330 emit_use (XEXP (x, 0));
5331 return emit_use (XEXP (x, 1));
5333 return emit_insn (gen_rtx_USE (VOIDmode, x));
5336 /* Return a sequence of insns to use rvalue X. */
5338 rtx_insn *
5339 gen_use (rtx x)
5341 rtx_insn *seq;
5343 start_sequence ();
5344 emit_use (x);
5345 seq = get_insns ();
5346 end_sequence ();
5347 return seq;
5350 /* Notes like REG_EQUAL and REG_EQUIV refer to a set in an instruction.
5351 Return the set in INSN that such notes describe, or NULL if the notes
5352 have no meaning for INSN. */
5354 rtx
5355 set_for_reg_notes (rtx insn)
5357 rtx pat, reg;
5359 if (!INSN_P (insn))
5360 return NULL_RTX;
5362 pat = PATTERN (insn);
5363 if (GET_CODE (pat) == PARALLEL)
5365 /* We do not use single_set because that ignores SETs of unused
5366 registers. REG_EQUAL and REG_EQUIV notes really do require the
5367 PARALLEL to have a single SET. */
5368 if (multiple_sets (insn))
5369 return NULL_RTX;
5370 pat = XVECEXP (pat, 0, 0);
5373 if (GET_CODE (pat) != SET)
5374 return NULL_RTX;
5376 reg = SET_DEST (pat);
5378 /* Notes apply to the contents of a STRICT_LOW_PART or ZERO_EXTRACT. */
5379 if (GET_CODE (reg) == STRICT_LOW_PART
5380 || GET_CODE (reg) == ZERO_EXTRACT)
5381 reg = XEXP (reg, 0);
5383 /* Check that we have a register. */
5384 if (!(REG_P (reg) || GET_CODE (reg) == SUBREG))
5385 return NULL_RTX;
5387 return pat;
5390 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
5391 note of this type already exists, remove it first. */
5393 rtx
5394 set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
5396 rtx note = find_reg_note (insn, kind, NULL_RTX);
5398 switch (kind)
5400 case REG_EQUAL:
5401 case REG_EQUIV:
5402 /* We need to support the REG_EQUAL on USE trick of find_reloads. */
5403 if (!set_for_reg_notes (insn) && GET_CODE (PATTERN (insn)) != USE)
5404 return NULL_RTX;
5406 /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
5407 It serves no useful purpose and breaks eliminate_regs. */
5408 if (GET_CODE (datum) == ASM_OPERANDS)
5409 return NULL_RTX;
5411 /* Notes with side effects are dangerous. Even if the side-effect
5412 initially mirrors one in PATTERN (INSN), later optimizations
5413 might alter the way that the final register value is calculated
5414 and so move or alter the side-effect in some way. The note would
5415 then no longer be a valid substitution for SET_SRC. */
5416 if (side_effects_p (datum))
5417 return NULL_RTX;
5418 break;
5420 default:
5421 break;
5424 if (note)
5425 XEXP (note, 0) = datum;
5426 else
5428 add_reg_note (insn, kind, datum);
5429 note = REG_NOTES (insn);
5432 switch (kind)
5434 case REG_EQUAL:
5435 case REG_EQUIV:
5436 df_notes_rescan (as_a <rtx_insn *> (insn));
5437 break;
5438 default:
5439 break;
5442 return note;
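/* Illustrative only (not in the original file): a typical use is to
   record that a (hypothetical) INSN computes a known constant:

     set_unique_reg_note (insn, REG_EQUAL, GEN_INT (42));

   Per the checks above, the call returns NULL_RTX instead of adding a
   note when INSN has no suitable single SET, when DATUM is an
   ASM_OPERANDS, or when DATUM has side effects.  */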
5445 /* Like set_unique_reg_note, but don't do anything unless INSN sets DST. */
5446 rtx
5447 set_dst_reg_note (rtx insn, enum reg_note kind, rtx datum, rtx dst)
5449 rtx set = set_for_reg_notes (insn);
5451 if (set && SET_DEST (set) == dst)
5452 return set_unique_reg_note (insn, kind, datum);
5453 return NULL_RTX;
5456 /* Emit the rtl pattern X as an appropriate kind of insn. Also emit a
5457 following barrier if the instruction needs one and if ALLOW_BARRIER_P
5458 is true.
5460 If X is a label, it is simply added into the insn chain. */
5462 rtx_insn *
5463 emit (rtx x, bool allow_barrier_p)
5465 enum rtx_code code = classify_insn (x);
5467 switch (code)
5469 case CODE_LABEL:
5470 return emit_label (x);
5471 case INSN:
5472 return emit_insn (x);
5473 case JUMP_INSN:
5475 rtx_insn *insn = emit_jump_insn (x);
5476 if (allow_barrier_p
5477 && (any_uncondjump_p (insn) || GET_CODE (x) == RETURN))
5478 return emit_barrier ();
5479 return insn;
5481 case CALL_INSN:
5482 return emit_call_insn (x);
5483 case DEBUG_INSN:
5484 return emit_debug_insn (x);
5485 default:
5486 gcc_unreachable ();
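/* Illustrative only (not in the original file); DEST and SRC are
   hypothetical operands:

     rtx_insn *insn = emit (gen_rtx_SET (dest, src), true);

   classify_insn returns INSN for a plain SET, so this is equivalent to
   emit_insn.  For a jump pattern the same call dispatches to
   emit_jump_insn and, for an unconditional jump, also emits (and
   returns) a following barrier because ALLOW_BARRIER_P is true.  */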
5490 /* Space for free sequence stack entries. */
5491 static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
5493 /* Begin emitting insns to a sequence. If this sequence will contain
5494 something that might cause the compiler to pop arguments to function
5495 calls (because those pops have previously been deferred; see
5496 INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
5497 before calling this function. That will ensure that the deferred
5498 pops are not accidentally emitted in the middle of this sequence. */
5500 void
5501 start_sequence (void)
5503 struct sequence_stack *tem;
5505 if (free_sequence_stack != NULL)
5507 tem = free_sequence_stack;
5508 free_sequence_stack = tem->next;
5510 else
5511 tem = ggc_alloc<sequence_stack> ();
5513 tem->next = get_current_sequence ()->next;
5514 tem->first = get_insns ();
5515 tem->last = get_last_insn ();
5516 get_current_sequence ()->next = tem;
5518 set_first_insn (0);
5519 set_last_insn (0);
5522 /* Set up the insn chain starting with FIRST as the current sequence,
5523 saving the previously current one. See the documentation for
5524 start_sequence for more information about how to use this function. */
5526 void
5527 push_to_sequence (rtx_insn *first)
5529 rtx_insn *last;
5531 start_sequence ();
5533 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last))
5536 set_first_insn (first);
5537 set_last_insn (last);
5540 /* Like push_to_sequence, but take the last insn as an argument to avoid
5541 looping through the list. */
5543 void
5544 push_to_sequence2 (rtx_insn *first, rtx_insn *last)
5546 start_sequence ();
5548 set_first_insn (first);
5549 set_last_insn (last);
5552 /* Set up the outer-level insn chain
5553 as the current sequence, saving the previously current one. */
5555 void
5556 push_topmost_sequence (void)
5558 struct sequence_stack *top;
5560 start_sequence ();
5562 top = get_topmost_sequence ();
5563 set_first_insn (top->first);
5564 set_last_insn (top->last);
5567 /* After emitting to the outer-level insn chain, update the outer-level
5568 insn chain, and restore the previous saved state. */
5570 void
5571 pop_topmost_sequence (void)
5573 struct sequence_stack *top;
5575 top = get_topmost_sequence ();
5576 top->first = get_insns ();
5577 top->last = get_last_insn ();
5579 end_sequence ();
5582 /* After emitting to a sequence, restore previous saved state.
5584 To get the contents of the sequence just made, you must call
5585 `get_insns' *before* calling here.
5587 If the compiler might have deferred popping arguments while
5588 generating this sequence, and this sequence will not be immediately
5589 inserted into the instruction stream, use do_pending_stack_adjust
5590 before calling get_insns. That will ensure that the deferred
5591 pops are inserted into this sequence, and not into some random
5592 location in the instruction stream. See INHIBIT_DEFER_POP for more
5593 information about deferred popping of arguments. */
5595 void
5596 end_sequence (void)
5598 struct sequence_stack *tem = get_current_sequence ()->next;
5600 set_first_insn (tem->first);
5601 set_last_insn (tem->last);
5602 get_current_sequence ()->next = tem->next;
5604 memset (tem, 0, sizeof (*tem));
5605 tem->next = free_sequence_stack;
5606 free_sequence_stack = tem;
5609 /* Return true if currently emitting into a sequence. */
5611 bool
5612 in_sequence_p (void)
5614 return get_current_sequence ()->next != 0;
5617 /* Put the various virtual registers into REGNO_REG_RTX. */
5619 static void
5620 init_virtual_regs (void)
5622 regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
5623 regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
5624 regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
5625 regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
5626 regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
5627 regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
5628 = virtual_preferred_stack_boundary_rtx;
5632 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
5633 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
5634 static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
5635 static int copy_insn_n_scratches;
5637 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5638 copied an ASM_OPERANDS.
5639 In that case, it is the original input-operand vector. */
5640 static rtvec orig_asm_operands_vector;
5642 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5643 copied an ASM_OPERANDS.
5644 In that case, it is the copied input-operand vector. */
5645 static rtvec copy_asm_operands_vector;
5647 /* Likewise for the constraints vector. */
5648 static rtvec orig_asm_constraints_vector;
5649 static rtvec copy_asm_constraints_vector;
5651 /* Recursively create a new copy of an rtx for copy_insn.
5652 This function differs from copy_rtx in that it handles SCRATCHes and
5653 ASM_OPERANDs properly.
5654 Normally, this function is not used directly; use copy_insn as front end.
5655 However, you could first copy an insn pattern with copy_insn and then use
5656 this function afterwards to properly copy any REG_NOTEs containing
5657 SCRATCHes. */
5659 rtx
5660 copy_insn_1 (rtx orig)
5662 rtx copy;
5663 int i, j;
5664 RTX_CODE code;
5665 const char *format_ptr;
5667 if (orig == NULL)
5668 return NULL;
5670 code = GET_CODE (orig);
5672 switch (code)
5674 case REG:
5675 case DEBUG_EXPR:
5676 CASE_CONST_ANY:
5677 case SYMBOL_REF:
5678 case CODE_LABEL:
5679 case PC:
5680 case RETURN:
5681 case SIMPLE_RETURN:
5682 return orig;
5683 case CLOBBER:
5684 /* Share clobbers of hard registers, but do not share pseudo reg
5685 clobbers or clobbers of hard registers that originated as pseudos.
5686 This is needed to allow safe register renaming. */
5687 if (REG_P (XEXP (orig, 0))
5688 && HARD_REGISTER_NUM_P (REGNO (XEXP (orig, 0)))
5689 && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (orig, 0))))
5690 return orig;
5691 break;
5693 case SCRATCH:
5694 for (i = 0; i < copy_insn_n_scratches; i++)
5695 if (copy_insn_scratch_in[i] == orig)
5696 return copy_insn_scratch_out[i];
5697 break;
5699 case CONST:
5700 if (shared_const_p (orig))
5701 return orig;
5702 break;
5704 /* A MEM with a constant address is not sharable. The problem is that
5705 the constant address may need to be reloaded. If the mem is shared,
5706 then reloading one copy of this mem will cause all copies to appear
5707 to have been reloaded. */
5709 default:
5710 break;
5713 /* Copy the various flags, fields, and other information. We assume
5714 that all fields need copying, and then clear the fields that should
5715 not be copied. That is the sensible default behavior, and forces
5716 us to explicitly document why we are *not* copying a flag. */
5717 copy = shallow_copy_rtx (orig);
5719 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
5720 if (INSN_P (orig))
5722 RTX_FLAG (copy, jump) = 0;
5723 RTX_FLAG (copy, call) = 0;
5724 RTX_FLAG (copy, frame_related) = 0;
5727 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
5729 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
5730 switch (*format_ptr++)
5732 case 'e':
5733 if (XEXP (orig, i) != NULL)
5734 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
5735 break;
5737 case 'E':
5738 case 'V':
5739 if (XVEC (orig, i) == orig_asm_constraints_vector)
5740 XVEC (copy, i) = copy_asm_constraints_vector;
5741 else if (XVEC (orig, i) == orig_asm_operands_vector)
5742 XVEC (copy, i) = copy_asm_operands_vector;
5743 else if (XVEC (orig, i) != NULL)
5745 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
5746 for (j = 0; j < XVECLEN (copy, i); j++)
5747 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
5749 break;
5751 case 't':
5752 case 'w':
5753 case 'i':
5754 case 'p':
5755 case 's':
5756 case 'S':
5757 case 'u':
5758 case '0':
5759 /* These are left unchanged. */
5760 break;
5762 default:
5763 gcc_unreachable ();
5766 if (code == SCRATCH)
5768 i = copy_insn_n_scratches++;
5769 gcc_assert (i < MAX_RECOG_OPERANDS);
5770 copy_insn_scratch_in[i] = orig;
5771 copy_insn_scratch_out[i] = copy;
5773 else if (code == ASM_OPERANDS)
5775 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
5776 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
5777 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
5778 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
5781 return copy;
5784 /* Create a new copy of an rtx.
5785 This function differs from copy_rtx in that it handles SCRATCHes and
5786 ASM_OPERANDs properly.
5787 INSN doesn't really have to be a full INSN; it could be just the
5788 pattern. */
5789 rtx
5790 copy_insn (rtx insn)
5792 copy_insn_n_scratches = 0;
5793 orig_asm_operands_vector = 0;
5794 orig_asm_constraints_vector = 0;
5795 copy_asm_operands_vector = 0;
5796 copy_asm_constraints_vector = 0;
5797 return copy_insn_1 (insn);
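/* Illustrative only (not in the original file): the two-step use
   described before copy_insn_1, copying a (hypothetical) insn's pattern
   and then its REG_NOTEs.  */

static rtx ATTRIBUTE_UNUSED
copy_pattern_and_notes_sketch (rtx_insn *insn, rtx *notes_out)
{
  /* copy_insn resets the SCRATCH/ASM_OPERANDS bookkeeping above...  */
  rtx pat = copy_insn (PATTERN (insn));
  /* ...so this second pass reuses the recorded SCRATCH copies when the
     notes mention SCRATCHes from the pattern.  */
  *notes_out = copy_insn_1 (REG_NOTES (insn));
  return pat;
}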
5800 /* Return a copy of INSN that can be used in a SEQUENCE delay slot,
5801 on the assumption that INSN itself remains in its original place. */
5803 rtx_insn *
5804 copy_delay_slot_insn (rtx_insn *insn)
5806 /* Copy INSN with its rtx_code, all its notes, location etc. */
5807 insn = as_a <rtx_insn *> (copy_rtx (insn));
5808 INSN_UID (insn) = cur_insn_uid++;
5809 return insn;
5812 /* Initialize data structures and variables in this file
5813 before generating rtl for each function. */
5815 void
5816 init_emit (void)
5818 set_first_insn (NULL);
5819 set_last_insn (NULL);
5820 if (param_min_nondebug_insn_uid)
5821 cur_insn_uid = param_min_nondebug_insn_uid;
5822 else
5823 cur_insn_uid = 1;
5824 cur_debug_insn_uid = 1;
5825 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
5826 first_label_num = label_num;
5827 get_current_sequence ()->next = NULL;
5829 /* Init the tables that describe all the pseudo regs. */
5831 crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
5833 crtl->emit.regno_pointer_align
5834 = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);
5836 regno_reg_rtx
5837 = ggc_cleared_vec_alloc<rtx> (crtl->emit.regno_pointer_align_length);
5839 /* Put copies of all the hard registers into regno_reg_rtx. */
5840 memcpy (regno_reg_rtx,
5841 initial_regno_reg_rtx,
5842 FIRST_PSEUDO_REGISTER * sizeof (rtx));
5844 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
5845 init_virtual_regs ();
5847 /* Indicate that the virtual registers and stack locations are
5848 all pointers. */
5849 REG_POINTER (stack_pointer_rtx) = 1;
5850 REG_POINTER (frame_pointer_rtx) = 1;
5851 REG_POINTER (hard_frame_pointer_rtx) = 1;
5852 REG_POINTER (arg_pointer_rtx) = 1;
5854 REG_POINTER (virtual_incoming_args_rtx) = 1;
5855 REG_POINTER (virtual_stack_vars_rtx) = 1;
5856 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5857 REG_POINTER (virtual_outgoing_args_rtx) = 1;
5858 REG_POINTER (virtual_cfa_rtx) = 1;
5860 #ifdef STACK_BOUNDARY
5861 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5862 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5863 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5864 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5866 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5867 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5868 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5869 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5871 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
5872 #endif
5874 #ifdef INIT_EXPANDERS
5875 INIT_EXPANDERS;
5876 #endif
5879 /* Return the value of element I of CONST_VECTOR X as a wide_int. */
5881 wide_int
5882 const_vector_int_elt (const_rtx x, unsigned int i)
5884 /* First handle elements that are directly encoded. */
5885 machine_mode elt_mode = GET_MODE_INNER (GET_MODE (x));
5886 if (i < (unsigned int) XVECLEN (x, 0))
5887 return rtx_mode_t (CONST_VECTOR_ENCODED_ELT (x, i), elt_mode);
5889 /* Identify the pattern that contains element I and work out the index of
5890 the last encoded element for that pattern. */
5891 unsigned int encoded_nelts = const_vector_encoded_nelts (x);
5892 unsigned int npatterns = CONST_VECTOR_NPATTERNS (x);
5893 unsigned int count = i / npatterns;
5894 unsigned int pattern = i % npatterns;
5895 unsigned int final_i = encoded_nelts - npatterns + pattern;
5897 /* If there are no steps, the final encoded value is the right one. */
5898 if (!CONST_VECTOR_STEPPED_P (x))
5899 return rtx_mode_t (CONST_VECTOR_ENCODED_ELT (x, final_i), elt_mode);
5901 /* Otherwise work out the value from the last two encoded elements. */
5902 rtx v1 = CONST_VECTOR_ENCODED_ELT (x, final_i - npatterns);
5903 rtx v2 = CONST_VECTOR_ENCODED_ELT (x, final_i);
5904 wide_int diff = wi::sub (rtx_mode_t (v2, elt_mode),
5905 rtx_mode_t (v1, elt_mode));
5906 return wi::add (rtx_mode_t (v2, elt_mode), (count - 2) * diff);
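/* For example, with a single stepped pattern encoding { 1, 3, 5, ... }
   (npatterns == 1, three elements per pattern), the encoded elements
   are 1, 3 and 5.  For I == 4: COUNT == 4, FINAL_I == 2, DIFF == 5 - 3,
   so the result is 5 + (4 - 2) * 2 == 9, the fifth element of the
   series.  */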
5909 /* Return the value of element I of CONST_VECTOR X. */
5911 rtx
5912 const_vector_elt (const_rtx x, unsigned int i)
5914 /* First handle elements that are directly encoded. */
5915 if (i < (unsigned int) XVECLEN (x, 0))
5916 return CONST_VECTOR_ENCODED_ELT (x, i);
5918 /* If there are no steps, the final encoded value is the right one. */
5919 if (!CONST_VECTOR_STEPPED_P (x))
5921 /* Identify the pattern that contains element I and work out the index of
5922 the last encoded element for that pattern. */
5923 unsigned int encoded_nelts = const_vector_encoded_nelts (x);
5924 unsigned int npatterns = CONST_VECTOR_NPATTERNS (x);
5925 unsigned int pattern = i % npatterns;
5926 unsigned int final_i = encoded_nelts - npatterns + pattern;
5927 return CONST_VECTOR_ENCODED_ELT (x, final_i);
5930 /* Otherwise work out the value from the last two encoded elements. */
5931 return immed_wide_int_const (const_vector_int_elt (x, i),
5932 GET_MODE_INNER (GET_MODE (x)));
5935 /* Return true if X is a valid element for a CONST_VECTOR of the given
5936 mode. */
5938 bool
5939 valid_for_const_vector_p (machine_mode, rtx x)
5941 return (CONST_SCALAR_INT_P (x)
5942 || CONST_POLY_INT_P (x)
5943 || CONST_DOUBLE_AS_FLOAT_P (x)
5944 || CONST_FIXED_P (x));
5947 /* Generate a vector constant of mode MODE in which every element has
5948 value ELT. */
5950 rtx
5951 gen_const_vec_duplicate (machine_mode mode, rtx elt)
5953 rtx_vector_builder builder (mode, 1, 1);
5954 builder.quick_push (elt);
5955 return builder.build ();
5958 /* Return a vector rtx of mode MODE in which every element has value X.
5959 The result will be a constant if X is constant. */
5961 rtx
5962 gen_vec_duplicate (machine_mode mode, rtx x)
5964 if (valid_for_const_vector_p (mode, x))
5965 return gen_const_vec_duplicate (mode, x);
5966 return gen_rtx_VEC_DUPLICATE (mode, x);
5969 /* A subroutine of const_vec_series_p that handles the case in which:
5971 (GET_CODE (X) == CONST_VECTOR
5972 && CONST_VECTOR_NPATTERNS (X) == 1
5973 && !CONST_VECTOR_DUPLICATE_P (X))
5975 is known to hold. */
5977 bool
5978 const_vec_series_p_1 (const_rtx x, rtx *base_out, rtx *step_out)
5980 /* Stepped sequences are only defined for integers, to avoid specifying
5981 rounding behavior. */
5982 if (GET_MODE_CLASS (GET_MODE (x)) != MODE_VECTOR_INT)
5983 return false;
5985 /* A non-duplicated vector with two elements can always be seen as a
5986 series with a nonzero step. Longer vectors must have a stepped
5987 encoding. */
5988 if (maybe_ne (CONST_VECTOR_NUNITS (x), 2)
5989 && !CONST_VECTOR_STEPPED_P (x))
5990 return false;
5992 /* Calculate the step between the first and second elements. */
5993 scalar_mode inner = GET_MODE_INNER (GET_MODE (x));
5994 rtx base = CONST_VECTOR_ELT (x, 0);
5995 rtx step = simplify_binary_operation (MINUS, inner,
5996 CONST_VECTOR_ENCODED_ELT (x, 1), base);
5997 if (rtx_equal_p (step, CONST0_RTX (inner)))
5998 return false;
6000 /* If we have a stepped encoding, check that the step between the
6001 second and third elements is the same as STEP. */
6002 if (CONST_VECTOR_STEPPED_P (x))
6004 rtx diff = simplify_binary_operation (MINUS, inner,
6005 CONST_VECTOR_ENCODED_ELT (x, 2),
6006 CONST_VECTOR_ENCODED_ELT (x, 1));
6007 if (!rtx_equal_p (step, diff))
6008 return false;
6011 *base_out = base;
6012 *step_out = step;
6013 return true;
6016 /* Generate a vector constant of mode MODE in which element I has
6017 the value BASE + I * STEP. */
6019 rtx
6020 gen_const_vec_series (machine_mode mode, rtx base, rtx step)
6022 gcc_assert (valid_for_const_vector_p (mode, base)
6023 && valid_for_const_vector_p (mode, step));
6025 rtx_vector_builder builder (mode, 1, 3);
6026 builder.quick_push (base);
6027 for (int i = 1; i < 3; ++i)
6028 builder.quick_push (simplify_gen_binary (PLUS, GET_MODE_INNER (mode),
6029 builder[i - 1], step));
6030 return builder.build ();
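/* Illustrative only: for instance, gen_const_vec_series (V4SImode,
   const0_rtx, const1_rtx) pushes 0, 1 and 2 into the one-pattern,
   three-elements-per-pattern builder above, which builds the stepped
   constant vector { 0, 1, 2, 3 }.  (V4SImode is just an example mode;
   availability is target-dependent.)  */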
6033 /* Generate a vector of mode MODE in which element I has the value
6034 BASE + I * STEP. The result will be a constant if BASE and STEP
6035 are both constants. */
6037 rtx
6038 gen_vec_series (machine_mode mode, rtx base, rtx step)
6040 if (step == const0_rtx)
6041 return gen_vec_duplicate (mode, base);
6042 if (valid_for_const_vector_p (mode, base)
6043 && valid_for_const_vector_p (mode, step))
6044 return gen_const_vec_series (mode, base, step);
6045 return gen_rtx_VEC_SERIES (mode, base, step);
6048 /* Generate a new vector constant for mode MODE and constant value
6049 CONSTANT. */
6051 static rtx
6052 gen_const_vector (machine_mode mode, int constant)
6054 machine_mode inner = GET_MODE_INNER (mode);
6056 gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));
6058 rtx el = const_tiny_rtx[constant][(int) inner];
6059 gcc_assert (el);
6061 return gen_const_vec_duplicate (mode, el);
6064 /* Generate a vector like gen_rtx_raw_CONST_VEC, but use the zero vector when
6065 all elements are zero, and the one vector when all elements are one. */
6066 rtx
6067 gen_rtx_CONST_VECTOR (machine_mode mode, rtvec v)
6069 gcc_assert (known_eq (GET_MODE_NUNITS (mode), GET_NUM_ELEM (v)));
6071 /* If the values are all the same, check to see if we can use one of the
6072 standard constant vectors. */
6073 if (rtvec_all_equal_p (v))
6074 return gen_const_vec_duplicate (mode, RTVEC_ELT (v, 0));
6076 unsigned int nunits = GET_NUM_ELEM (v);
6077 rtx_vector_builder builder (mode, nunits, 1);
6078 for (unsigned int i = 0; i < nunits; ++i)
6079 builder.quick_push (RTVEC_ELT (v, i));
6080 return builder.build (v);
6083 /* Initialize global register information required by all functions. */
6085 void
6086 init_emit_regs (void)
6088 int i;
6089 machine_mode mode;
6090 mem_attrs *attrs;
6092 /* Reset register attributes */
6093 reg_attrs_htab->empty ();
6095 /* We need reg_raw_mode, so initialize the modes now. */
6096 init_reg_modes_target ();
6098 /* Assign register numbers to the globally defined register rtx. */
6099 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
6100 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
6101 hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
6102 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
6103 virtual_incoming_args_rtx =
6104 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
6105 virtual_stack_vars_rtx =
6106 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
6107 virtual_stack_dynamic_rtx =
6108 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
6109 virtual_outgoing_args_rtx =
6110 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
6111 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
6112 virtual_preferred_stack_boundary_rtx =
6113 gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);
6115 /* Initialize RTL for commonly used hard registers. These are
6116 copied into regno_reg_rtx as we begin to compile each function. */
6117 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
6118 initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
6120 #ifdef RETURN_ADDRESS_POINTER_REGNUM
6121 return_address_pointer_rtx
6122 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
6123 #endif
6125 pic_offset_table_rtx = NULL_RTX;
6126 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
6127 pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
6129 /* Process stack-limiting command-line options. */
6130 if (opt_fstack_limit_symbol_arg != NULL)
6131 stack_limit_rtx
6132 = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (opt_fstack_limit_symbol_arg));
6133 if (opt_fstack_limit_register_no >= 0)
6134 stack_limit_rtx = gen_rtx_REG (Pmode, opt_fstack_limit_register_no);
6136 for (i = 0; i < (int) MAX_MACHINE_MODE; i++)
6138 mode = (machine_mode) i;
6139 attrs = ggc_cleared_alloc<mem_attrs> ();
6140 attrs->align = BITS_PER_UNIT;
6141 attrs->addrspace = ADDR_SPACE_GENERIC;
6142 if (mode != BLKmode && mode != VOIDmode)
6144 attrs->size_known_p = true;
6145 attrs->size = GET_MODE_SIZE (mode);
6146 if (STRICT_ALIGNMENT)
6147 attrs->align = GET_MODE_ALIGNMENT (mode);
6149 mode_mem_attrs[i] = attrs;
6152 split_branch_probability = profile_probability::uninitialized ();
/* Initialize global machine_mode variables.  */

void
init_derived_machine_modes (void)
{
  opt_scalar_int_mode mode_iter, opt_byte_mode, opt_word_mode;
  FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_INT)
    {
      scalar_int_mode mode = mode_iter.require ();

      if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
	  && !opt_byte_mode.exists ())
	opt_byte_mode = mode;

      if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
	  && !opt_word_mode.exists ())
	opt_word_mode = mode;
    }

  byte_mode = opt_byte_mode.require ();
  word_mode = opt_word_mode.require ();
  ptr_mode = as_a <scalar_int_mode>
    (mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0).require ());
}
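
/* Sketch of the invariants established above (compiled out): the three
   derived modes are the scalar integer modes whose widths match
   BITS_PER_UNIT, BITS_PER_WORD and POINTER_SIZE respectively.  */
#if 0
static void
example_derived_modes (void)
{
  gcc_assert (GET_MODE_BITSIZE (byte_mode) == BITS_PER_UNIT);
  gcc_assert (GET_MODE_BITSIZE (word_mode) == BITS_PER_WORD);
  gcc_assert (GET_MODE_BITSIZE (ptr_mode) == POINTER_SIZE);
}
#endif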
/* Create some permanent unique rtl objects shared between all functions.  */

void
init_emit_once (void)
{
  int i;
  machine_mode mode;
  scalar_float_mode double_mode;
  opt_scalar_mode smode_iter;

  /* Initialize the CONST_INT, CONST_WIDE_INT, CONST_DOUBLE,
     CONST_FIXED, and memory attribute hash tables.  */
  const_int_htab = hash_table<const_int_hasher>::create_ggc (37);

#if TARGET_SUPPORTS_WIDE_INT
  const_wide_int_htab = hash_table<const_wide_int_hasher>::create_ggc (37);
#endif
  const_double_htab = hash_table<const_double_hasher>::create_ggc (37);

  if (NUM_POLY_INT_COEFFS > 1)
    const_poly_int_htab = hash_table<const_poly_int_hasher>::create_ggc (37);

  const_fixed_htab = hash_table<const_fixed_hasher>::create_ggc (37);

  reg_attrs_htab = hash_table<reg_attr_hasher>::create_ggc (37);

#ifdef INIT_EXPANDERS
  /* This is to initialize {init|mark|free}_machine_status before the first
     call to push_function_context_to.  This is needed by the Chill front
     end which calls push_function_context_to before the first call to
     init_function_start.  */
  INIT_EXPANDERS;
#endif

  /* Create the unique rtx's for certain rtx codes and operand values.  */

  /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
     tries to use these variables.  */
  for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
    const_int_rtx[i + MAX_SAVED_CONST_INT] =
      gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);

  if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
      && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
    const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
  else
    const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);

  double_mode = float_mode_for_size (DOUBLE_TYPE_SIZE).require ();

  real_from_integer (&dconst0, double_mode, 0, SIGNED);
  real_from_integer (&dconst1, double_mode, 1, SIGNED);
  real_from_integer (&dconst2, double_mode, 2, SIGNED);

  dconstm0 = dconst0;
  dconstm0.sign = 1;

  dconstm1 = dconst1;
  dconstm1.sign = 1;

  dconsthalf = dconst1;
  SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);

  real_inf (&dconstinf);
  real_inf (&dconstninf, true);

  for (i = 0; i < 3; i++)
    {
      const REAL_VALUE_TYPE *const r =
	(i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);

      FOR_EACH_MODE_IN_CLASS (mode, MODE_FLOAT)
	const_tiny_rtx[i][(int) mode] =
	  const_double_from_real_value (*r, mode);

      FOR_EACH_MODE_IN_CLASS (mode, MODE_DECIMAL_FLOAT)
	const_tiny_rtx[i][(int) mode] =
	  const_double_from_real_value (*r, mode);

      const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);

      FOR_EACH_MODE_IN_CLASS (mode, MODE_INT)
	const_tiny_rtx[i][(int) mode] = GEN_INT (i);

      for (mode = MIN_MODE_PARTIAL_INT;
	   mode <= MAX_MODE_PARTIAL_INT;
	   mode = (machine_mode)((int)(mode) + 1))
	const_tiny_rtx[i][(int) mode] = GEN_INT (i);
    }

  const_tiny_rtx[3][(int) VOIDmode] = constm1_rtx;

  FOR_EACH_MODE_IN_CLASS (mode, MODE_INT)
    const_tiny_rtx[3][(int) mode] = constm1_rtx;

  /* For BImode, 1 and -1 are unsigned and signed interpretations
     of the same value.  */
  for (mode = MIN_MODE_BOOL;
       mode <= MAX_MODE_BOOL;
       mode = (machine_mode)((int)(mode) + 1))
    {
      const_tiny_rtx[0][(int) mode] = const0_rtx;
      if (mode == BImode)
	{
	  const_tiny_rtx[1][(int) mode] = const_true_rtx;
	  const_tiny_rtx[3][(int) mode] = const_true_rtx;
	}
      else
	{
	  const_tiny_rtx[1][(int) mode] = const1_rtx;
	  const_tiny_rtx[3][(int) mode] = constm1_rtx;
	}
    }

  for (mode = MIN_MODE_PARTIAL_INT;
       mode <= MAX_MODE_PARTIAL_INT;
       mode = (machine_mode)((int)(mode) + 1))
    const_tiny_rtx[3][(int) mode] = constm1_rtx;

  FOR_EACH_MODE_IN_CLASS (mode, MODE_COMPLEX_INT)
    {
      rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  FOR_EACH_MODE_IN_CLASS (mode, MODE_COMPLEX_FLOAT)
    {
      rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_BOOL)
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
      if (GET_MODE_INNER (mode) == BImode)
	/* As for BImode, "all 1" and "all -1" are unsigned and signed
	   interpretations of the same value.  */
	const_tiny_rtx[1][(int) mode] = const_tiny_rtx[3][(int) mode];
      else
	const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_INT)
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
      const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
    }

  FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_FLOAT)
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_FRACT)
    {
      scalar_mode smode = smode_iter.require ();
      FCONST0 (smode).data.high = 0;
      FCONST0 (smode).data.low = 0;
      FCONST0 (smode).mode = smode;
      const_tiny_rtx[0][(int) smode]
	= CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);
    }

  FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_UFRACT)
    {
      scalar_mode smode = smode_iter.require ();
      FCONST0 (smode).data.high = 0;
      FCONST0 (smode).data.low = 0;
      FCONST0 (smode).mode = smode;
      const_tiny_rtx[0][(int) smode]
	= CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);
    }

  FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_ACCUM)
    {
      scalar_mode smode = smode_iter.require ();
      FCONST0 (smode).data.high = 0;
      FCONST0 (smode).data.low = 0;
      FCONST0 (smode).mode = smode;
      const_tiny_rtx[0][(int) smode]
	= CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);

      /* We store the value 1.  */
      FCONST1 (smode).data.high = 0;
      FCONST1 (smode).data.low = 0;
      FCONST1 (smode).mode = smode;
      FCONST1 (smode).data
	= double_int_one.lshift (GET_MODE_FBIT (smode),
				 HOST_BITS_PER_DOUBLE_INT,
				 SIGNED_FIXED_POINT_MODE_P (smode));
      const_tiny_rtx[1][(int) smode]
	= CONST_FIXED_FROM_FIXED_VALUE (FCONST1 (smode), smode);
    }

  FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_UACCUM)
    {
      scalar_mode smode = smode_iter.require ();
      FCONST0 (smode).data.high = 0;
      FCONST0 (smode).data.low = 0;
      FCONST0 (smode).mode = smode;
      const_tiny_rtx[0][(int) smode]
	= CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);

      /* We store the value 1.  */
      FCONST1 (smode).data.high = 0;
      FCONST1 (smode).data.low = 0;
      FCONST1 (smode).mode = smode;
      FCONST1 (smode).data
	= double_int_one.lshift (GET_MODE_FBIT (smode),
				 HOST_BITS_PER_DOUBLE_INT,
				 SIGNED_FIXED_POINT_MODE_P (smode));
      const_tiny_rtx[1][(int) smode]
	= CONST_FIXED_FROM_FIXED_VALUE (FCONST1 (smode), smode);
    }

  FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_FRACT)
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
    }

  FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_UFRACT)
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
    }

  FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_ACCUM)
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_UACCUM)
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
    if (GET_MODE_CLASS ((machine_mode) i) == MODE_CC)
      const_tiny_rtx[0][i] = const0_rtx;

  pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
  ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
  simple_return_rtx = gen_rtx_fmt_ (SIMPLE_RETURN, VOIDmode);
  invalid_insn_rtx = gen_rtx_INSN (VOIDmode,
				   /*prev_insn=*/NULL,
				   /*next_insn=*/NULL,
				   /*bb=*/NULL,
				   /*pattern=*/NULL_RTX,
				   /*location=*/-1,
				   CODE_FOR_nothing,
				   /*reg_notes=*/NULL_RTX);
}
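
/* Sketch of the sharing established above (compiled out): small integer
   constants and the per-mode 0/1/2/-1 entries are unique objects, so
   pointer equality is a valid test for them.  */
#if 0
static void
example_shared_constants (void)
{
  /* GEN_INT reuses the presaved CONST_INTs in const_int_rtx.  */
  gcc_assert (GEN_INT (0) == const0_rtx);
  gcc_assert (GEN_INT (-1) == constm1_rtx);
  /* CONST0_RTX indexes const_tiny_rtx, filled in by the loops above.  */
  gcc_assert (CONST0_RTX (word_mode) == const0_rtx);
}
#endif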
/* Produce an exact duplicate of insn INSN after AFTER.
   Take care to update libcall regions if present.  */

rtx_insn *
emit_copy_of_insn_after (rtx_insn *insn, rtx_insn *after)
{
  rtx_insn *new_rtx;
  rtx link;

  switch (GET_CODE (insn))
    {
    case INSN:
      new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case JUMP_INSN:
      new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
      CROSSING_JUMP_P (new_rtx) = CROSSING_JUMP_P (insn);
      break;

    case DEBUG_INSN:
      new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case CALL_INSN:
      new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
      if (CALL_INSN_FUNCTION_USAGE (insn))
	CALL_INSN_FUNCTION_USAGE (new_rtx)
	  = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
      SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
      RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
      RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
      RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
	= RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
      break;

    default:
      gcc_unreachable ();
    }

  /* Update LABEL_NUSES.  */
  if (NONDEBUG_INSN_P (insn))
    mark_jump_label (PATTERN (new_rtx), new_rtx, 0);

  INSN_LOCATION (new_rtx) = INSN_LOCATION (insn);

  /* If the old insn is frame related, then so is the new one.  This is
     primarily needed for IA-64 unwind info which marks epilogue insns,
     which may be duplicated by the basic block reordering code.  */
  RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);

  /* Locate the end of existing REG_NOTES in NEW_RTX.  */
  rtx *ptail = &REG_NOTES (new_rtx);
  while (*ptail != NULL_RTX)
    ptail = &XEXP (*ptail, 1);

  /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
     will make them.  REG_LABEL_TARGETs are created there too, but are
     supposed to be sticky, so we copy them.  */
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
      {
	*ptail = duplicate_reg_note (link);
	ptail = &XEXP (*ptail, 1);
      }

  INSN_CODE (new_rtx) = INSN_CODE (insn);
  return new_rtx;
}
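
/* Typical use (an illustrative sketch, compiled out): make a twin of an
   insn directly after the original, with notes and flags carried over.  */
#if 0
static rtx_insn *
example_duplicate_insn (rtx_insn *insn)
{
  rtx_insn *copy = emit_copy_of_insn_after (insn, insn);
  /* Location and frame-relatedness follow the original.  */
  gcc_assert (INSN_LOCATION (copy) == INSN_LOCATION (insn));
  gcc_assert (RTX_FRAME_RELATED_P (copy) == RTX_FRAME_RELATED_P (insn));
  return copy;
}
#endif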
static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
rtx
gen_hard_reg_clobber (machine_mode mode, unsigned int regno)
{
  if (hard_reg_clobbers[mode][regno])
    return hard_reg_clobbers[mode][regno];
  else
    return (hard_reg_clobbers[mode][regno] =
	    gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
}
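
/* Sketch (compiled out): the clobbers are memoized per (mode, regno),
   so repeated requests return the identical rtx; register number 0 is
   an arbitrary example.  */
#if 0
static void
example_hard_reg_clobber (void)
{
  rtx c1 = gen_hard_reg_clobber (word_mode, 0);
  rtx c2 = gen_hard_reg_clobber (word_mode, 0);
  gcc_assert (c1 == c2 && GET_CODE (c1) == CLOBBER);
}
#endif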
location_t prologue_location;
location_t epilogue_location;

/* Hold the current location and the last location, so that the location
   datastructures are built lazily, and only when insns at a given place
   are actually needed.  */
static location_t curr_location;
/* Allocate insn location datastructure.  */
void
insn_locations_init (void)
{
  prologue_location = epilogue_location = 0;
  curr_location = UNKNOWN_LOCATION;
}

/* At the end of emit stage, clear current location.  */
void
insn_locations_finalize (void)
{
  epilogue_location = curr_location;
  curr_location = UNKNOWN_LOCATION;
}

/* Set current location.  */
void
set_curr_insn_location (location_t location)
{
  curr_location = location;
}

/* Get current location.  */
location_t
curr_insn_location (void)
{
  return curr_location;
}
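
/* Sketch of the intended protocol (compiled out): expansion code records
   the location of the statement being expanded, and newly emitted insns
   then pick it up from curr_insn_location.  */
#if 0
static void
example_location_protocol (location_t stmt_location)
{
  set_curr_insn_location (stmt_location);
  gcc_assert (curr_insn_location () == stmt_location);
}
#endif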
/* Set the location of the insn chain starting at INSN to LOC.  */
void
set_insn_locations (rtx_insn *insn, location_t loc)
{
  while (insn)
    {
      if (INSN_P (insn))
	INSN_LOCATION (insn) = loc;
      insn = NEXT_INSN (insn);
    }
}
/* Return the lexical scope block INSN belongs to.  */
tree
insn_scope (const rtx_insn *insn)
{
  return LOCATION_BLOCK (INSN_LOCATION (insn));
}

/* Return line number of the statement that produced this insn.  */
int
insn_line (const rtx_insn *insn)
{
  return LOCATION_LINE (INSN_LOCATION (insn));
}

/* Return source file of the statement that produced this insn.  */
const char *
insn_file (const rtx_insn *insn)
{
  return LOCATION_FILE (INSN_LOCATION (insn));
}

/* Return expanded location of the statement that produced this insn.  */
expanded_location
insn_location (const rtx_insn *insn)
{
  return expand_location (INSN_LOCATION (insn));
}
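
/* Sketch (compiled out): the accessors above decompose INSN_LOCATION,
   e.g. for diagnostics or dump files.  */
#if 0
static void
example_print_insn_position (const rtx_insn *insn)
{
  const char *file = insn_file (insn);
  int line = insn_line (insn);
  if (file)
    fprintf (stderr, "%s:%d\n", file, line);
}
#endif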
/* Return true if memory model MODEL requires a pre-operation (release-style)
   barrier or a post-operation (acquire-style) barrier.  While not universal,
   this function matches behavior of several targets.  */

bool
need_atomic_barrier_p (enum memmodel model, bool pre)
{
  switch (model & MEMMODEL_BASE_MASK)
    {
    case MEMMODEL_RELAXED:
    case MEMMODEL_CONSUME:
      return false;
    case MEMMODEL_RELEASE:
      return pre;
    case MEMMODEL_ACQUIRE:
      return !pre;
    case MEMMODEL_ACQ_REL:
    case MEMMODEL_SEQ_CST:
      return true;
    default:
      gcc_unreachable ();
    }
}
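
/* The resulting truth table, written out as checks (compiled out):
   relaxed/consume need no barrier, release needs only a pre-operation
   barrier, acquire only a post-operation one, acq_rel/seq_cst both.  */
#if 0
static void
example_atomic_barriers (void)
{
  gcc_assert (!need_atomic_barrier_p (MEMMODEL_RELAXED, true)
	      && !need_atomic_barrier_p (MEMMODEL_RELAXED, false));
  gcc_assert (need_atomic_barrier_p (MEMMODEL_RELEASE, true)
	      && !need_atomic_barrier_p (MEMMODEL_RELEASE, false));
  gcc_assert (!need_atomic_barrier_p (MEMMODEL_ACQUIRE, true)
	      && need_atomic_barrier_p (MEMMODEL_ACQUIRE, false));
  gcc_assert (need_atomic_barrier_p (MEMMODEL_SEQ_CST, true)
	      && need_atomic_barrier_p (MEMMODEL_SEQ_CST, false));
}
#endif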
/* Return a constant shift amount for shifting a value of mode MODE
   by VALUE bits.  */

rtx
gen_int_shift_amount (machine_mode, poly_int64 value)
{
  /* Use a 64-bit mode, to avoid any truncation.

     ??? Perhaps this should be automatically derived from the .md files
     instead, or perhaps have a target hook.  */
  scalar_int_mode shift_mode = (BITS_PER_UNIT == 8
				? DImode
				: int_mode_for_size (64, 0).require ());
  return gen_int_mode (value, shift_mode);
}
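
/* Sketch (compiled out): the shift amount is built in a 64-bit mode no
   matter which mode is being shifted, so no count is truncated.  Small
   values still come back as the shared VOIDmode CONST_INTs.  */
#if 0
static void
example_shift_amount (void)
{
  rtx amount = gen_int_shift_amount (QImode, 3);
  gcc_assert (CONST_INT_P (amount) && INTVAL (amount) == 3);
}
#endif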
/* Initialize fields of rtl_data related to stack alignment.  */

void
rtl_data::init_stack_alignment ()
{
  stack_alignment_needed = STACK_BOUNDARY;
  max_used_stack_slot_alignment = STACK_BOUNDARY;
  stack_alignment_estimated = 0;
  preferred_stack_boundary = STACK_BOUNDARY;
}

#include "gt-emit-rtl.h"