gcc/emit-rtl.c
1 /* Emit RTL for the GCC expander.
2 Copyright (C) 1987-2019 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
21 /* Middle-to-low level generation of rtx code and insns.
23 This file contains support functions for creating rtl expressions
24 and manipulating them in the doubly-linked chain of insns.
26 The patterns of the insns are created by machine-dependent
27 routines in insn-emit.c, which is generated automatically from
28 the machine description. These routines make the individual rtx's
29 of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
30 which are automatically generated from rtl.def; what is machine
31 dependent is the kind of rtx's they make and what arguments they
32 use. */
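/* For example, gen_rtx_fmt_ee (PLUS, SImode, a, b) builds the two-operand
   expression (plus:SI a b); the "ee" suffix mirrors the operand format
   string that rtl.def declares for PLUS.  */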
34 #include "config.h"
35 #include "system.h"
36 #include "coretypes.h"
37 #include "memmodel.h"
38 #include "backend.h"
39 #include "target.h"
40 #include "rtl.h"
41 #include "tree.h"
42 #include "df.h"
43 #include "tm_p.h"
44 #include "stringpool.h"
45 #include "insn-config.h"
46 #include "regs.h"
47 #include "emit-rtl.h"
48 #include "recog.h"
49 #include "diagnostic-core.h"
50 #include "alias.h"
51 #include "fold-const.h"
52 #include "varasm.h"
53 #include "cfgrtl.h"
54 #include "tree-eh.h"
55 #include "explow.h"
56 #include "expr.h"
57 #include "params.h"
58 #include "builtins.h"
59 #include "rtl-iter.h"
60 #include "stor-layout.h"
61 #include "opts.h"
62 #include "predict.h"
63 #include "rtx-vector-builder.h"
64 #include "gimple.h"
65 #include "gimple-ssa.h"
66 #include "gimplify.h"
68 struct target_rtl default_target_rtl;
69 #if SWITCHABLE_TARGET
70 struct target_rtl *this_target_rtl = &default_target_rtl;
71 #endif
73 #define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)
75 /* Commonly used modes. */
77 scalar_int_mode byte_mode; /* Mode whose width is BITS_PER_UNIT. */
78 scalar_int_mode word_mode; /* Mode whose width is BITS_PER_WORD. */
79 scalar_int_mode ptr_mode; /* Mode whose width is POINTER_SIZE. */
81 /* Datastructures maintained for currently processed function in RTL form. */
83 struct rtl_data x_rtl;
85 /* Indexed by pseudo register number, gives the rtx for that pseudo.
86 Allocated in parallel with regno_pointer_align.
87 FIXME: We could put it into emit_status struct, but gengtype is not able to deal
88 with length attribute nested in top level structures. */
90 rtx * regno_reg_rtx;
92 /* This is *not* reset after each function. It gives each CODE_LABEL
93 in the entire compilation a unique label number. */
95 static GTY(()) int label_num = 1;
97 /* We record floating-point CONST_DOUBLEs in each floating-point mode for
98 the values of 0, 1, and 2. For the integer entries and VOIDmode, we
99 record a copy of const[012]_rtx and constm1_rtx. CONSTM1_RTX
100 is set only for MODE_INT and MODE_VECTOR_INT modes. */
102 rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE];
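/* rtl.h's CONST0_RTX (MODE), CONST1_RTX (MODE) and CONST2_RTX (MODE)
   macros index into this table; e.g. CONST0_RTX (SFmode) yields the shared
   CONST_DOUBLE for 0.0 in SFmode.  */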
104 rtx const_true_rtx;
106 REAL_VALUE_TYPE dconst0;
107 REAL_VALUE_TYPE dconst1;
108 REAL_VALUE_TYPE dconst2;
109 REAL_VALUE_TYPE dconstm1;
110 REAL_VALUE_TYPE dconsthalf;
112 /* Record fixed-point constant 0 and 1. */
113 FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
114 FIXED_VALUE_TYPE fconst1[MAX_FCONST1];
116 /* We make one copy of (const_int C) where C is in
117 [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
118 to save space during the compilation and simplify comparisons of
119 integers. */
121 rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
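/* rtl.h maps the familiar names onto this array: const0_rtx is
   const_int_rtx[MAX_SAVED_CONST_INT], const1_rtx is the entry after it,
   and constm1_rtx the entry before it.  */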
123 /* Standard pieces of rtx, to be substituted directly into things. */
124 rtx pc_rtx;
125 rtx ret_rtx;
126 rtx simple_return_rtx;
127 rtx cc0_rtx;
129 /* Marker used for denoting an INSN, which should never be accessed (i.e.,
130 this pointer should normally never be dereferenced), but is required to be
131 distinct from NULL_RTX. Currently used by peephole2 pass. */
132 rtx_insn *invalid_insn_rtx;
134 /* A hash table storing CONST_INTs whose absolute value is greater
135 than MAX_SAVED_CONST_INT. */
137 struct const_int_hasher : ggc_cache_ptr_hash<rtx_def>
139 typedef HOST_WIDE_INT compare_type;
141 static hashval_t hash (rtx i);
142 static bool equal (rtx i, HOST_WIDE_INT h);
145 static GTY ((cache)) hash_table<const_int_hasher> *const_int_htab;
147 struct const_wide_int_hasher : ggc_cache_ptr_hash<rtx_def>
149 static hashval_t hash (rtx x);
150 static bool equal (rtx x, rtx y);
153 static GTY ((cache)) hash_table<const_wide_int_hasher> *const_wide_int_htab;
155 struct const_poly_int_hasher : ggc_cache_ptr_hash<rtx_def>
157 typedef std::pair<machine_mode, poly_wide_int_ref> compare_type;
159 static hashval_t hash (rtx x);
160 static bool equal (rtx x, const compare_type &y);
163 static GTY ((cache)) hash_table<const_poly_int_hasher> *const_poly_int_htab;
165 /* A hash table storing register attribute structures. */
166 struct reg_attr_hasher : ggc_cache_ptr_hash<reg_attrs>
168 static hashval_t hash (reg_attrs *x);
169 static bool equal (reg_attrs *a, reg_attrs *b);
172 static GTY ((cache)) hash_table<reg_attr_hasher> *reg_attrs_htab;
174 /* A hash table storing all CONST_DOUBLEs. */
175 struct const_double_hasher : ggc_cache_ptr_hash<rtx_def>
177 static hashval_t hash (rtx x);
178 static bool equal (rtx x, rtx y);
181 static GTY ((cache)) hash_table<const_double_hasher> *const_double_htab;
183 /* A hash table storing all CONST_FIXEDs. */
184 struct const_fixed_hasher : ggc_cache_ptr_hash<rtx_def>
186 static hashval_t hash (rtx x);
187 static bool equal (rtx x, rtx y);
190 static GTY ((cache)) hash_table<const_fixed_hasher> *const_fixed_htab;
192 #define cur_insn_uid (crtl->emit.x_cur_insn_uid)
193 #define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
194 #define first_label_num (crtl->emit.x_first_label_num)
196 static void set_used_decls (tree);
197 static void mark_label_nuses (rtx);
198 #if TARGET_SUPPORTS_WIDE_INT
199 static rtx lookup_const_wide_int (rtx);
200 #endif
201 static rtx lookup_const_double (rtx);
202 static rtx lookup_const_fixed (rtx);
203 static rtx gen_const_vector (machine_mode, int);
204 static void copy_rtx_if_shared_1 (rtx *orig);
206 /* Probability of the conditional branch currently being processed by try_split. */
207 profile_probability split_branch_probability;
209 /* Returns a hash code for X (which is really a CONST_INT). */
211 hashval_t
212 const_int_hasher::hash (rtx x)
214 return (hashval_t) INTVAL (x);
217 /* Returns nonzero if the value represented by X (which is really a
218 CONST_INT) is the same as that given by Y (which is really a
219 HOST_WIDE_INT *). */
221 bool
222 const_int_hasher::equal (rtx x, HOST_WIDE_INT y)
224 return (INTVAL (x) == y);
227 #if TARGET_SUPPORTS_WIDE_INT
228 /* Returns a hash code for X (which is really a CONST_WIDE_INT). */
230 hashval_t
231 const_wide_int_hasher::hash (rtx x)
233 int i;
234 unsigned HOST_WIDE_INT hash = 0;
235 const_rtx xr = x;
237 for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
238 hash += CONST_WIDE_INT_ELT (xr, i);
240 return (hashval_t) hash;
243 /* Returns nonzero if the value represented by X (which is really a
244 CONST_WIDE_INT) is the same as that given by Y (which is really a
245 CONST_WIDE_INT). */
247 bool
248 const_wide_int_hasher::equal (rtx x, rtx y)
250 int i;
251 const_rtx xr = x;
252 const_rtx yr = y;
253 if (CONST_WIDE_INT_NUNITS (xr) != CONST_WIDE_INT_NUNITS (yr))
254 return false;
256 for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
257 if (CONST_WIDE_INT_ELT (xr, i) != CONST_WIDE_INT_ELT (yr, i))
258 return false;
260 return true;
262 #endif
264 /* Returns a hash code for CONST_POLY_INT X. */
266 hashval_t
267 const_poly_int_hasher::hash (rtx x)
269 inchash::hash h;
270 h.add_int (GET_MODE (x));
271 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
272 h.add_wide_int (CONST_POLY_INT_COEFFS (x)[i]);
273 return h.end ();
276 /* Returns nonzero if CONST_POLY_INT X is an rtx representation of Y. */
278 bool
279 const_poly_int_hasher::equal (rtx x, const compare_type &y)
281 if (GET_MODE (x) != y.first)
282 return false;
283 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
284 if (CONST_POLY_INT_COEFFS (x)[i] != y.second.coeffs[i])
285 return false;
286 return true;
289 /* Returns a hash code for X (which is really a CONST_DOUBLE). */
290 hashval_t
291 const_double_hasher::hash (rtx x)
293 const_rtx const value = x;
294 hashval_t h;
296 if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (value) == VOIDmode)
297 h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
298 else
300 h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
301 /* MODE is used in the comparison, so it should be in the hash. */
302 h ^= GET_MODE (value);
304 return h;
307 /* Returns nonzero if the value represented by X (really a ...)
308 is the same as that represented by Y (really a ...) */
309 bool
310 const_double_hasher::equal (rtx x, rtx y)
312 const_rtx const a = x, b = y;
314 if (GET_MODE (a) != GET_MODE (b))
315 return 0;
316 if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (a) == VOIDmode)
317 return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
318 && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
319 else
320 return real_identical (CONST_DOUBLE_REAL_VALUE (a),
321 CONST_DOUBLE_REAL_VALUE (b));
324 /* Returns a hash code for X (which is really a CONST_FIXED). */
326 hashval_t
327 const_fixed_hasher::hash (rtx x)
329 const_rtx const value = x;
330 hashval_t h;
332 h = fixed_hash (CONST_FIXED_VALUE (value));
333 /* MODE is used in the comparison, so it should be in the hash. */
334 h ^= GET_MODE (value);
335 return h;
338 /* Returns nonzero if the value represented by X is the same as that
339 represented by Y. */
341 bool
342 const_fixed_hasher::equal (rtx x, rtx y)
344 const_rtx const a = x, b = y;
346 if (GET_MODE (a) != GET_MODE (b))
347 return 0;
348 return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
351 /* Return true if the given memory attributes are equal. */
353 bool
354 mem_attrs_eq_p (const class mem_attrs *p, const class mem_attrs *q)
356 if (p == q)
357 return true;
358 if (!p || !q)
359 return false;
360 return (p->alias == q->alias
361 && p->offset_known_p == q->offset_known_p
362 && (!p->offset_known_p || known_eq (p->offset, q->offset))
363 && p->size_known_p == q->size_known_p
364 && (!p->size_known_p || known_eq (p->size, q->size))
365 && p->align == q->align
366 && p->addrspace == q->addrspace
367 && (p->expr == q->expr
368 || (p->expr != NULL_TREE && q->expr != NULL_TREE
369 && operand_equal_p (p->expr, q->expr, 0))));
372 /* Set MEM's memory attributes so that they are the same as ATTRS. */
374 static void
375 set_mem_attrs (rtx mem, mem_attrs *attrs)
377 /* If everything is the default, we can just clear the attributes. */
378 if (mem_attrs_eq_p (attrs, mode_mem_attrs[(int) GET_MODE (mem)]))
380 MEM_ATTRS (mem) = 0;
381 return;
384 if (!MEM_ATTRS (mem)
385 || !mem_attrs_eq_p (attrs, MEM_ATTRS (mem)))
387 MEM_ATTRS (mem) = ggc_alloc<mem_attrs> ();
388 memcpy (MEM_ATTRS (mem), attrs, sizeof (mem_attrs));
392 /* Returns a hash code for X (which is really a reg_attrs *). */
394 hashval_t
395 reg_attr_hasher::hash (reg_attrs *x)
397 const reg_attrs *const p = x;
399 inchash::hash h;
400 h.add_ptr (p->decl);
401 h.add_poly_hwi (p->offset);
402 return h.end ();
405 /* Returns nonzero if the value represented by X is the same as that given by
406 Y. */
408 bool
409 reg_attr_hasher::equal (reg_attrs *x, reg_attrs *y)
411 const reg_attrs *const p = x;
412 const reg_attrs *const q = y;
414 return (p->decl == q->decl && known_eq (p->offset, q->offset));
416 /* Allocate a new reg_attrs structure and insert it into the hash table if
417 one identical to it is not already in the table. The structure records
418 DECL and OFFSET for a REG. */
420 static reg_attrs *
421 get_reg_attrs (tree decl, poly_int64 offset)
423 reg_attrs attrs;
425 /* If everything is the default, we can just return zero. */
426 if (decl == 0 && known_eq (offset, 0))
427 return 0;
429 attrs.decl = decl;
430 attrs.offset = offset;
432 reg_attrs **slot = reg_attrs_htab->find_slot (&attrs, INSERT);
433 if (*slot == 0)
435 *slot = ggc_alloc<reg_attrs> ();
436 memcpy (*slot, &attrs, sizeof (reg_attrs));
439 return *slot;
443 #if !HAVE_blockage
444 /* Generate an empty ASM_INPUT, which is used to block attempts to schedule,
445 and to prevent register equivalences from being seen across this insn. */
448 gen_blockage (void)
450 rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
451 MEM_VOLATILE_P (x) = true;
452 return x;
454 #endif
457 /* Set the mode and register number of X to MODE and REGNO. */
459 void
460 set_mode_and_regno (rtx x, machine_mode mode, unsigned int regno)
462 unsigned int nregs = (HARD_REGISTER_NUM_P (regno)
463 ? hard_regno_nregs (regno, mode)
464 : 1);
465 PUT_MODE_RAW (x, mode);
466 set_regno_raw (x, regno, nregs);
469 /* Generate a new REG rtx. Make sure ORIGINAL_REGNO is set properly, and
470 don't attempt to share with the various global pieces of rtl (such as
471 frame_pointer_rtx). */
474 gen_raw_REG (machine_mode mode, unsigned int regno)
476 rtx x = rtx_alloc (REG MEM_STAT_INFO);
477 set_mode_and_regno (x, mode, regno);
478 REG_ATTRS (x) = NULL;
479 ORIGINAL_REGNO (x) = regno;
480 return x;
483 /* There are some RTL codes that require special attention; the generation
484 functions do the raw handling. If you add to this list, modify
485 special_rtx in gengenrtl.c as well. */
487 rtx_expr_list *
488 gen_rtx_EXPR_LIST (machine_mode mode, rtx expr, rtx expr_list)
490 return as_a <rtx_expr_list *> (gen_rtx_fmt_ee (EXPR_LIST, mode, expr,
491 expr_list));
494 rtx_insn_list *
495 gen_rtx_INSN_LIST (machine_mode mode, rtx insn, rtx insn_list)
497 return as_a <rtx_insn_list *> (gen_rtx_fmt_ue (INSN_LIST, mode, insn,
498 insn_list));
501 rtx_insn *
502 gen_rtx_INSN (machine_mode mode, rtx_insn *prev_insn, rtx_insn *next_insn,
503 basic_block bb, rtx pattern, int location, int code,
504 rtx reg_notes)
506 return as_a <rtx_insn *> (gen_rtx_fmt_uuBeiie (INSN, mode,
507 prev_insn, next_insn,
508 bb, pattern, location, code,
509 reg_notes));
513 gen_rtx_CONST_INT (machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
515 if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
516 return const_int_rtx[arg + MAX_SAVED_CONST_INT];
518 #if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
519 if (const_true_rtx && arg == STORE_FLAG_VALUE)
520 return const_true_rtx;
521 #endif
523 /* Look up the CONST_INT in the hash table. */
524 rtx *slot = const_int_htab->find_slot_with_hash (arg, (hashval_t) arg,
525 INSERT);
526 if (*slot == 0)
527 *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);
529 return *slot;
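/* The widely used GEN_INT (value) macro is simply
   gen_rtx_CONST_INT (VOIDmode, value), so it benefits from the same
   sharing.  */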
533 gen_int_mode (poly_int64 c, machine_mode mode)
535 c = trunc_int_for_mode (c, mode);
536 if (c.is_constant ())
537 return GEN_INT (c.coeffs[0]);
538 unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode));
539 return immed_wide_int_const (poly_wide_int::from (c, prec, SIGNED), mode);
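/* Because trunc_int_for_mode sign-extends from MODE's precision,
   gen_int_mode (0xff, QImode) returns (const_int -1), i.e. the shared
   constm1_rtx.  */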
542 /* CONST_DOUBLEs might be created from pairs of integers, or from
543 REAL_VALUE_TYPEs. Also, their length is known only at run time,
544 so we cannot use gen_rtx_raw_CONST_DOUBLE. */
546 /* Determine whether REAL, a CONST_DOUBLE, already exists in the
547 hash table. If so, return its counterpart; otherwise add it
548 to the hash table and return it. */
549 static rtx
550 lookup_const_double (rtx real)
552 rtx *slot = const_double_htab->find_slot (real, INSERT);
553 if (*slot == 0)
554 *slot = real;
556 return *slot;
559 /* Return a CONST_DOUBLE rtx for a floating-point value specified by
560 VALUE in mode MODE. */
562 const_double_from_real_value (REAL_VALUE_TYPE value, machine_mode mode)
564 rtx real = rtx_alloc (CONST_DOUBLE);
565 PUT_MODE (real, mode);
567 real->u.rv = value;
569 return lookup_const_double (real);
572 /* Determine whether FIXED, a CONST_FIXED, already exists in the
573 hash table. If so, return its counterpart; otherwise add it
574 to the hash table and return it. */
576 static rtx
577 lookup_const_fixed (rtx fixed)
579 rtx *slot = const_fixed_htab->find_slot (fixed, INSERT);
580 if (*slot == 0)
581 *slot = fixed;
583 return *slot;
586 /* Return a CONST_FIXED rtx for a fixed-point value specified by
587 VALUE in mode MODE. */
590 const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, machine_mode mode)
592 rtx fixed = rtx_alloc (CONST_FIXED);
593 PUT_MODE (fixed, mode);
595 fixed->u.fv = value;
597 return lookup_const_fixed (fixed);
600 #if TARGET_SUPPORTS_WIDE_INT == 0
601 /* Constructs double_int from rtx CST. */
603 double_int
604 rtx_to_double_int (const_rtx cst)
606 double_int r;
608 if (CONST_INT_P (cst))
609 r = double_int::from_shwi (INTVAL (cst));
610 else if (CONST_DOUBLE_AS_INT_P (cst))
612 r.low = CONST_DOUBLE_LOW (cst);
613 r.high = CONST_DOUBLE_HIGH (cst);
615 else
616 gcc_unreachable ();
618 return r;
620 #endif
622 #if TARGET_SUPPORTS_WIDE_INT
623 /* Determine whether CONST_WIDE_INT WINT already exists in the hash table.
624 If so, return its counterpart; otherwise add it to the hash table and
625 return it. */
627 static rtx
628 lookup_const_wide_int (rtx wint)
630 rtx *slot = const_wide_int_htab->find_slot (wint, INSERT);
631 if (*slot == 0)
632 *slot = wint;
634 return *slot;
636 #endif
638 /* Return an rtx constant for V, given that the constant has mode MODE.
639 The returned rtx will be a CONST_INT if V fits, otherwise it will be
640 a CONST_DOUBLE (if !TARGET_SUPPORTS_WIDE_INT) or a CONST_WIDE_INT
641 (if TARGET_SUPPORTS_WIDE_INT). */
643 static rtx
644 immed_wide_int_const_1 (const wide_int_ref &v, machine_mode mode)
646 unsigned int len = v.get_len ();
647 /* Not scalar_int_mode because we also allow pointer bound modes. */
648 unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode));
650 /* Allow truncation but not extension since we do not know if the
651 number is signed or unsigned. */
652 gcc_assert (prec <= v.get_precision ());
654 if (len < 2 || prec <= HOST_BITS_PER_WIDE_INT)
655 return gen_int_mode (v.elt (0), mode);
657 #if TARGET_SUPPORTS_WIDE_INT
659 unsigned int i;
660 rtx value;
661 unsigned int blocks_needed
662 = (prec + HOST_BITS_PER_WIDE_INT - 1) / HOST_BITS_PER_WIDE_INT;
664 if (len > blocks_needed)
665 len = blocks_needed;
667 value = const_wide_int_alloc (len);
669 /* It is so tempting to just put the mode in here. Must control
670 myself ... */
671 PUT_MODE (value, VOIDmode);
672 CWI_PUT_NUM_ELEM (value, len);
674 for (i = 0; i < len; i++)
675 CONST_WIDE_INT_ELT (value, i) = v.elt (i);
677 return lookup_const_wide_int (value);
679 #else
680 return immed_double_const (v.elt (0), v.elt (1), mode);
681 #endif
684 #if TARGET_SUPPORTS_WIDE_INT == 0
685 /* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
686 of ints: I0 is the low-order word and I1 is the high-order word.
687 For values that are larger than HOST_BITS_PER_DOUBLE_INT, the
688 implied upper bits are copies of the high bit of i1. The value
689 itself is neither signed nor unsigned. Do not use this routine for
690 non-integer modes; convert to REAL_VALUE_TYPE and use
691 const_double_from_real_value. */
694 immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, machine_mode mode)
696 rtx value;
697 unsigned int i;
699 /* There are the following cases (note that there are no modes with
700 HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < HOST_BITS_PER_DOUBLE_INT):
702 1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
703 gen_int_mode.
704 2) If the value of the integer fits into HOST_WIDE_INT anyway
705 (i.e., i1 consists only of copies of the sign bit, and the signs
706 of i0 and i1 are the same), then we return a CONST_INT for i0.
707 3) Otherwise, we create a CONST_DOUBLE for i0 and i1. */
708 scalar_mode smode;
709 if (is_a <scalar_mode> (mode, &smode)
710 && GET_MODE_BITSIZE (smode) <= HOST_BITS_PER_WIDE_INT)
711 return gen_int_mode (i0, mode);
713 /* If this integer fits in one word, return a CONST_INT. */
714 if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
715 return GEN_INT (i0);
717 /* We use VOIDmode for integers. */
718 value = rtx_alloc (CONST_DOUBLE);
719 PUT_MODE (value, VOIDmode);
721 CONST_DOUBLE_LOW (value) = i0;
722 CONST_DOUBLE_HIGH (value) = i1;
724 for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
725 XWINT (value, i) = 0;
727 return lookup_const_double (value);
729 #endif
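/* For the !TARGET_SUPPORTS_WIDE_INT path above: assuming a 64-bit
   HOST_WIDE_INT, immed_double_const (-1, 0, TImode) cannot be expressed as
   a CONST_INT and therefore yields a VOIDmode CONST_DOUBLE whose low word
   is -1 and high word is 0, i.e. the value 2**64 - 1.  */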
731 /* Return an rtx representation of C in mode MODE. */
734 immed_wide_int_const (const poly_wide_int_ref &c, machine_mode mode)
736 if (c.is_constant ())
737 return immed_wide_int_const_1 (c.coeffs[0], mode);
739 /* Not scalar_int_mode because we also allow pointer bound modes. */
740 unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode));
742 /* Allow truncation but not extension since we do not know if the
743 number is signed or unsigned. */
744 gcc_assert (prec <= c.coeffs[0].get_precision ());
745 poly_wide_int newc = poly_wide_int::from (c, prec, SIGNED);
747 /* See whether we already have an rtx for this constant. */
748 inchash::hash h;
749 h.add_int (mode);
750 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
751 h.add_wide_int (newc.coeffs[i]);
752 const_poly_int_hasher::compare_type typed_value (mode, newc);
753 rtx *slot = const_poly_int_htab->find_slot_with_hash (typed_value,
754 h.end (), INSERT);
755 rtx x = *slot;
756 if (x)
757 return x;
759 /* Create a new rtx. There's a choice to be made here between installing
760 the actual mode of the rtx or leaving it as VOIDmode (for consistency
761 with CONST_INT). In practice the handling of the codes is different
762 enough that we get no benefit from using VOIDmode, and various places
763 assume that VOIDmode implies CONST_INT. Using the real mode seems like
764 the right long-term direction anyway. */
765 typedef trailing_wide_ints<NUM_POLY_INT_COEFFS> twi;
766 size_t extra_size = twi::extra_size (prec);
767 x = rtx_alloc_v (CONST_POLY_INT,
768 sizeof (struct const_poly_int_def) + extra_size);
769 PUT_MODE (x, mode);
770 CONST_POLY_INT_COEFFS (x).set_precision (prec);
771 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
772 CONST_POLY_INT_COEFFS (x)[i] = newc.coeffs[i];
774 *slot = x;
775 return x;
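/* CONST_POLY_INT only appears when NUM_POLY_INT_COEFFS > 1 (currently the
   AArch64 SVE port), where quantities such as the vector length are a
   runtime invariant rather than a compile-time constant.  */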
779 gen_rtx_REG (machine_mode mode, unsigned int regno)
781 /* In case the MD file explicitly references the frame pointer, have
782 all such references point to the same frame pointer. This is
783 used during frame pointer elimination to distinguish the explicit
784 references to these registers from pseudos that happened to be
785 assigned to them.
787 If we have eliminated the frame pointer or arg pointer, we will
788 be using it as a normal register, for example as a spill
789 register. In such cases, we might be accessing it in a mode that
790 is not Pmode and therefore cannot use the pre-allocated rtx.
792 Also don't do this when we are making new REGs in reload, since
793 we don't want to get confused with the real pointers. */
795 if (mode == Pmode && !reload_in_progress && !lra_in_progress)
797 if (regno == FRAME_POINTER_REGNUM
798 && (!reload_completed || frame_pointer_needed))
799 return frame_pointer_rtx;
801 if (!HARD_FRAME_POINTER_IS_FRAME_POINTER
802 && regno == HARD_FRAME_POINTER_REGNUM
803 && (!reload_completed || frame_pointer_needed))
804 return hard_frame_pointer_rtx;
805 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
806 if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
807 && regno == ARG_POINTER_REGNUM)
808 return arg_pointer_rtx;
809 #endif
810 #ifdef RETURN_ADDRESS_POINTER_REGNUM
811 if (regno == RETURN_ADDRESS_POINTER_REGNUM)
812 return return_address_pointer_rtx;
813 #endif
814 if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
815 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
816 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
817 return pic_offset_table_rtx;
818 if (regno == STACK_POINTER_REGNUM)
819 return stack_pointer_rtx;
822 #if 0
823 /* If the per-function register table has been set up, try to re-use
824 an existing entry in that table to avoid useless generation of RTL.
826 This code is disabled for now until we can fix the various backends
827 which depend on having non-shared hard registers in some cases. Long
828 term we want to re-enable this code as it can significantly cut down
829 on the amount of useless RTL that gets generated.
831 We'll also need to fix some code that runs after reload that wants to
832 set ORIGINAL_REGNO. */
834 if (cfun
835 && cfun->emit
836 && regno_reg_rtx
837 && regno < FIRST_PSEUDO_REGISTER
838 && reg_raw_mode[regno] == mode)
839 return regno_reg_rtx[regno];
840 #endif
842 return gen_raw_REG (mode, regno);
846 gen_rtx_MEM (machine_mode mode, rtx addr)
848 rtx rt = gen_rtx_raw_MEM (mode, addr);
850 /* This field is not cleared by the mere allocation of the rtx, so
851 we clear it here. */
852 MEM_ATTRS (rt) = 0;
854 return rt;
857 /* Generate a memory referring to non-trapping constant memory. */
860 gen_const_mem (machine_mode mode, rtx addr)
862 rtx mem = gen_rtx_MEM (mode, addr);
863 MEM_READONLY_P (mem) = 1;
864 MEM_NOTRAP_P (mem) = 1;
865 return mem;
868 /* Generate a MEM referring to fixed portions of the frame, e.g., register
869 save areas. */
872 gen_frame_mem (machine_mode mode, rtx addr)
874 rtx mem = gen_rtx_MEM (mode, addr);
875 MEM_NOTRAP_P (mem) = 1;
876 set_mem_alias_set (mem, get_frame_alias_set ());
877 return mem;
880 /* Generate a MEM referring to a temporary use of the stack, not part
881 of the fixed stack frame. For example, something which is pushed
882 by a target splitter. */
884 gen_tmp_stack_mem (machine_mode mode, rtx addr)
886 rtx mem = gen_rtx_MEM (mode, addr);
887 MEM_NOTRAP_P (mem) = 1;
888 if (!cfun->calls_alloca)
889 set_mem_alias_set (mem, get_frame_alias_set ());
890 return mem;
893 /* We want to create (subreg:OMODE (obj:IMODE) OFFSET). Return true if
894 this construct would be valid, and false otherwise. */
896 bool
897 validate_subreg (machine_mode omode, machine_mode imode,
898 const_rtx reg, poly_uint64 offset)
900 poly_uint64 isize = GET_MODE_SIZE (imode);
901 poly_uint64 osize = GET_MODE_SIZE (omode);
903 /* The sizes must be ordered, so that we know whether the subreg
904 is partial, paradoxical or complete. */
905 if (!ordered_p (isize, osize))
906 return false;
908 /* All subregs must be aligned. */
909 if (!multiple_p (offset, osize))
910 return false;
912 /* The subreg offset cannot be outside the inner object. */
913 if (maybe_ge (offset, isize))
914 return false;
916 poly_uint64 regsize = REGMODE_NATURAL_SIZE (imode);
918 /* ??? This should not be here. Temporarily continue to allow word_mode
919 subregs of anything. The most common offender is (subreg:SI (reg:DF)).
920 Generally, backends are doing something sketchy but it'll take time to
921 fix them all. */
922 if (omode == word_mode)
924 /* ??? Similarly, e.g. with (subreg:DF (reg:TI)). Though store_bit_field
925 is the culprit here, and not the backends. */
926 else if (known_ge (osize, regsize) && known_ge (isize, osize))
928 /* Allow component subregs of complex and vector. Though given the below
929 extraction rules, it's not always clear what that means. */
930 else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
931 && GET_MODE_INNER (imode) == omode)
933 /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
934 i.e. (subreg:V4SF (reg:SF) 0). This surely isn't the cleanest way to
935 represent this. It's questionable if this ought to be represented at
936 all -- why can't this all be hidden in post-reload splitters that make
937 arbitrary mode changes to the registers themselves. */
938 else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
940 /* Subregs involving floating point modes are not allowed to
941 change size. Therefore (subreg:DI (reg:DF) 0) is fine, but
942 (subreg:SI (reg:DF) 0) isn't. */
943 else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
945 if (! (known_eq (isize, osize)
946 /* LRA can use subreg to store a floating point value in
947 an integer mode. Although the floating point and the
948 integer modes need the same number of hard registers,
949 the size of floating point mode can be less than the
950 integer mode. LRA also uses subregs when a register
951 should be used in a different mode in one insn. */
952 || lra_in_progress))
953 return false;
956 /* Paradoxical subregs must have offset zero. */
957 if (maybe_gt (osize, isize))
958 return known_eq (offset, 0U);
960 /* This is a normal subreg. Verify that the offset is representable. */
962 /* For hard registers, we already have most of these rules collected in
963 subreg_offset_representable_p. */
964 if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
966 unsigned int regno = REGNO (reg);
968 if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
969 && GET_MODE_INNER (imode) == omode)
971 else if (!REG_CAN_CHANGE_MODE_P (regno, imode, omode))
972 return false;
974 return subreg_offset_representable_p (regno, imode, offset, omode);
977 /* The outer size must be ordered wrt the register size, otherwise
978 we wouldn't know at compile time how many registers the outer
979 mode occupies. */
980 if (!ordered_p (osize, regsize))
981 return false;
983 /* For pseudo registers, we want most of the same checks. Namely:
985 Assume that the pseudo register will be allocated to hard registers
986 that can hold REGSIZE bytes each. If OSIZE is not a multiple of REGSIZE,
987 the remainder must correspond to the lowpart of the containing hard
988 register. If BYTES_BIG_ENDIAN, the lowpart is at the highest offset,
989 otherwise it is at the lowest offset.
991 Given that we've already checked the mode and offset alignment,
992 we only have to check subblock subregs here. */
993 if (maybe_lt (osize, regsize)
994 && ! (lra_in_progress && (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))))
996 /* It is invalid for the target to pick a register size for a mode
997 that isn't ordered wrt the size of that mode. */
998 poly_uint64 block_size = ordered_min (isize, regsize);
999 unsigned int start_reg;
1000 poly_uint64 offset_within_reg;
1001 if (!can_div_trunc_p (offset, block_size, &start_reg, &offset_within_reg)
1002 || (BYTES_BIG_ENDIAN
1003 ? maybe_ne (offset_within_reg, block_size - osize)
1004 : maybe_ne (offset_within_reg, 0U)))
1005 return false;
1007 return true;
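/* As a concrete example of the rules above, (subreg:SI (reg:DI x) 0) is
   always accepted, whereas (subreg:SI (reg:DF x) 0) only survives because
   of the word_mode escape hatch when SImode happens to be word_mode.  */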
1011 gen_rtx_SUBREG (machine_mode mode, rtx reg, poly_uint64 offset)
1013 gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
1014 return gen_rtx_raw_SUBREG (mode, reg, offset);
1017 /* Generate a SUBREG representing the least-significant part of REG if MODE
1018 is smaller than mode of REG, otherwise paradoxical SUBREG. */
1021 gen_lowpart_SUBREG (machine_mode mode, rtx reg)
1023 machine_mode inmode;
1025 inmode = GET_MODE (reg);
1026 if (inmode == VOIDmode)
1027 inmode = mode;
1028 return gen_rtx_SUBREG (mode, reg,
1029 subreg_lowpart_offset (mode, inmode));
1033 gen_rtx_VAR_LOCATION (machine_mode mode, tree decl, rtx loc,
1034 enum var_init_status status)
1036 rtx x = gen_rtx_fmt_te (VAR_LOCATION, mode, decl, loc);
1037 PAT_VAR_LOCATION_STATUS (x) = status;
1038 return x;
1042 /* Create an rtvec and store within it the RTXen passed in the arguments. */
1044 rtvec
1045 gen_rtvec (int n, ...)
1047 int i;
1048 rtvec rt_val;
1049 va_list p;
1051 va_start (p, n);
1053 /* Don't allocate an empty rtvec... */
1054 if (n == 0)
1056 va_end (p);
1057 return NULL_RTVEC;
1060 rt_val = rtvec_alloc (n);
1062 for (i = 0; i < n; i++)
1063 rt_val->elem[i] = va_arg (p, rtx);
1065 va_end (p);
1066 return rt_val;
1069 rtvec
1070 gen_rtvec_v (int n, rtx *argp)
1072 int i;
1073 rtvec rt_val;
1075 /* Don't allocate an empty rtvec... */
1076 if (n == 0)
1077 return NULL_RTVEC;
1079 rt_val = rtvec_alloc (n);
1081 for (i = 0; i < n; i++)
1082 rt_val->elem[i] = *argp++;
1084 return rt_val;
1087 rtvec
1088 gen_rtvec_v (int n, rtx_insn **argp)
1090 int i;
1091 rtvec rt_val;
1093 /* Don't allocate an empty rtvec... */
1094 if (n == 0)
1095 return NULL_RTVEC;
1097 rt_val = rtvec_alloc (n);
1099 for (i = 0; i < n; i++)
1100 rt_val->elem[i] = *argp++;
1102 return rt_val;
1106 /* Return the number of bytes between the start of an OUTER_MODE
1107 in-memory value and the start of an INNER_MODE in-memory value,
1108 given that the former is a lowpart of the latter. It may be a
1109 paradoxical lowpart, in which case the offset will be negative
1110 on big-endian targets. */
1112 poly_int64
1113 byte_lowpart_offset (machine_mode outer_mode,
1114 machine_mode inner_mode)
1116 if (paradoxical_subreg_p (outer_mode, inner_mode))
1117 return -subreg_lowpart_offset (inner_mode, outer_mode);
1118 else
1119 return subreg_lowpart_offset (outer_mode, inner_mode);
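/* E.g. on a big-endian target the SImode lowpart of a DImode value lies
   4 bytes into it, while a paradoxical DImode view of an SImode value
   starts 4 bytes before it, hence the negative offset in that case.  */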
1122 /* Return the offset of (subreg:OUTER_MODE (mem:INNER_MODE X) OFFSET)
1123 from address X. For paradoxical big-endian subregs this is a
1124 negative value, otherwise it's the same as OFFSET. */
1126 poly_int64
1127 subreg_memory_offset (machine_mode outer_mode, machine_mode inner_mode,
1128 poly_uint64 offset)
1130 if (paradoxical_subreg_p (outer_mode, inner_mode))
1132 gcc_assert (known_eq (offset, 0U));
1133 return -subreg_lowpart_offset (inner_mode, outer_mode);
1135 return offset;
1138 /* As above, but return the offset that existing subreg X would have
1139 if SUBREG_REG (X) were stored in memory. The only significant thing
1140 about the current SUBREG_REG is its mode. */
1142 poly_int64
1143 subreg_memory_offset (const_rtx x)
1145 return subreg_memory_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)),
1146 SUBREG_BYTE (x));
1149 /* Generate a REG rtx for a new pseudo register of mode MODE.
1150 This pseudo is assigned the next sequential register number. */
1153 gen_reg_rtx (machine_mode mode)
1155 rtx val;
1156 unsigned int align = GET_MODE_ALIGNMENT (mode);
1158 gcc_assert (can_create_pseudo_p ());
1160 /* If a virtual register with bigger mode alignment is generated,
1161 increase stack alignment estimation because it might be spilled
1162 to stack later. */
1163 if (SUPPORTS_STACK_ALIGNMENT
1164 && crtl->stack_alignment_estimated < align
1165 && !crtl->stack_realign_processed)
1167 unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
1168 if (crtl->stack_alignment_estimated < min_align)
1169 crtl->stack_alignment_estimated = min_align;
1172 if (generating_concat_p
1173 && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
1174 || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
1176 /* For complex modes, don't make a single pseudo.
1177 Instead, make a CONCAT of two pseudos.
1178 This allows noncontiguous allocation of the real and imaginary parts,
1179 which makes much better code. Besides, allocating DCmode
1180 pseudos overstrains reload on some machines like the 386. */
1181 rtx realpart, imagpart;
1182 machine_mode partmode = GET_MODE_INNER (mode);
1184 realpart = gen_reg_rtx (partmode);
1185 imagpart = gen_reg_rtx (partmode);
1186 return gen_rtx_CONCAT (mode, realpart, imagpart);
1189 /* Do not call gen_reg_rtx with uninitialized crtl. */
1190 gcc_assert (crtl->emit.regno_pointer_align_length);
1192 crtl->emit.ensure_regno_capacity ();
1193 gcc_assert (reg_rtx_no < crtl->emit.regno_pointer_align_length);
1195 val = gen_raw_REG (mode, reg_rtx_no);
1196 regno_reg_rtx[reg_rtx_no++] = val;
1197 return val;
1200 /* Make sure regno_pointer_align and regno_reg_rtx are large
1201 enough to have elements in the range 0 <= idx <= reg_rtx_no. */
1203 void
1204 emit_status::ensure_regno_capacity ()
1206 int old_size = regno_pointer_align_length;
1208 if (reg_rtx_no < old_size)
1209 return;
1211 int new_size = old_size * 2;
1212 while (reg_rtx_no >= new_size)
1213 new_size *= 2;
1215 char *tmp = XRESIZEVEC (char, regno_pointer_align, new_size);
1216 memset (tmp + old_size, 0, new_size - old_size);
1217 regno_pointer_align = (unsigned char *) tmp;
1219 rtx *new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, new_size);
1220 memset (new1 + old_size, 0, (new_size - old_size) * sizeof (rtx));
1221 regno_reg_rtx = new1;
1223 crtl->emit.regno_pointer_align_length = new_size;
1226 /* Return TRUE if REG's REG_EXPR is a PARM_DECL, FALSE otherwise. */
1228 bool
1229 reg_is_parm_p (rtx reg)
1231 tree decl;
1233 gcc_assert (REG_P (reg));
1234 decl = REG_EXPR (reg);
1235 return (decl && TREE_CODE (decl) == PARM_DECL);
1238 /* Update NEW_RTX with the same attributes as REG, but with OFFSET added
1239 to the REG_OFFSET. */
1241 static void
1242 update_reg_offset (rtx new_rtx, rtx reg, poly_int64 offset)
1244 REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
1245 REG_OFFSET (reg) + offset);
1248 /* Generate a register with same attributes as REG, but with OFFSET
1249 added to the REG_OFFSET. */
1252 gen_rtx_REG_offset (rtx reg, machine_mode mode, unsigned int regno,
1253 poly_int64 offset)
1255 rtx new_rtx = gen_rtx_REG (mode, regno);
1257 update_reg_offset (new_rtx, reg, offset);
1258 return new_rtx;
1261 /* Generate a new pseudo-register with the same attributes as REG, but
1262 with OFFSET added to the REG_OFFSET. */
1265 gen_reg_rtx_offset (rtx reg, machine_mode mode, int offset)
1267 rtx new_rtx = gen_reg_rtx (mode);
1269 update_reg_offset (new_rtx, reg, offset);
1270 return new_rtx;
1273 /* Adjust REG in-place so that it has mode MODE. It is assumed that the
1274 new register is a (possibly paradoxical) lowpart of the old one. */
1276 void
1277 adjust_reg_mode (rtx reg, machine_mode mode)
1279 update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
1280 PUT_MODE (reg, mode);
1283 /* Copy REG's attributes from X, if X has any attributes. If REG and X
1284 have different modes, REG is a (possibly paradoxical) lowpart of X. */
1286 void
1287 set_reg_attrs_from_value (rtx reg, rtx x)
1289 poly_int64 offset;
1290 bool can_be_reg_pointer = true;
1292 /* Don't call mark_reg_pointer for incompatible pointer sign
1293 extension. */
1294 while (GET_CODE (x) == SIGN_EXTEND
1295 || GET_CODE (x) == ZERO_EXTEND
1296 || GET_CODE (x) == TRUNCATE
1297 || (GET_CODE (x) == SUBREG && subreg_lowpart_p (x)))
1299 #if defined(POINTERS_EXTEND_UNSIGNED)
1300 if (((GET_CODE (x) == SIGN_EXTEND && POINTERS_EXTEND_UNSIGNED)
1301 || (GET_CODE (x) == ZERO_EXTEND && ! POINTERS_EXTEND_UNSIGNED)
1302 || (paradoxical_subreg_p (x)
1303 && ! (SUBREG_PROMOTED_VAR_P (x)
1304 && SUBREG_CHECK_PROMOTED_SIGN (x,
1305 POINTERS_EXTEND_UNSIGNED))))
1306 && !targetm.have_ptr_extend ())
1307 can_be_reg_pointer = false;
1308 #endif
1309 x = XEXP (x, 0);
1312 /* Hard registers can be reused for multiple purposes within the same
1313 function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
1314 on them is wrong. */
1315 if (HARD_REGISTER_P (reg))
1316 return;
1318 offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
1319 if (MEM_P (x))
1321 if (MEM_OFFSET_KNOWN_P (x))
1322 REG_ATTRS (reg) = get_reg_attrs (MEM_EXPR (x),
1323 MEM_OFFSET (x) + offset);
1324 if (can_be_reg_pointer && MEM_POINTER (x))
1325 mark_reg_pointer (reg, 0);
1327 else if (REG_P (x))
1329 if (REG_ATTRS (x))
1330 update_reg_offset (reg, x, offset);
1331 if (can_be_reg_pointer && REG_POINTER (x))
1332 mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
1336 /* Generate a REG rtx for a new pseudo register, copying the mode
1337 and attributes from X. */
1340 gen_reg_rtx_and_attrs (rtx x)
1342 rtx reg = gen_reg_rtx (GET_MODE (x));
1343 set_reg_attrs_from_value (reg, x);
1344 return reg;
1347 /* Set the register attributes for registers contained in PARM_RTX.
1348 Use needed values from memory attributes of MEM. */
1350 void
1351 set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
1353 if (REG_P (parm_rtx))
1354 set_reg_attrs_from_value (parm_rtx, mem);
1355 else if (GET_CODE (parm_rtx) == PARALLEL)
1357 /* Check for a NULL entry in the first slot, used to indicate that the
1358 parameter goes both on the stack and in registers. */
1359 int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
1360 for (; i < XVECLEN (parm_rtx, 0); i++)
1362 rtx x = XVECEXP (parm_rtx, 0, i);
1363 if (REG_P (XEXP (x, 0)))
1364 REG_ATTRS (XEXP (x, 0))
1365 = get_reg_attrs (MEM_EXPR (mem),
1366 INTVAL (XEXP (x, 1)));
1371 /* Set the REG_ATTRS for registers in value X, given that X represents
1372 decl T. */
1374 void
1375 set_reg_attrs_for_decl_rtl (tree t, rtx x)
1377 if (!t)
1378 return;
1379 tree tdecl = t;
1380 if (GET_CODE (x) == SUBREG)
1382 gcc_assert (subreg_lowpart_p (x));
1383 x = SUBREG_REG (x);
1385 if (REG_P (x))
1386 REG_ATTRS (x)
1387 = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
1388 DECL_P (tdecl)
1389 ? DECL_MODE (tdecl)
1390 : TYPE_MODE (TREE_TYPE (tdecl))));
1391 if (GET_CODE (x) == CONCAT)
1393 if (REG_P (XEXP (x, 0)))
1394 REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
1395 if (REG_P (XEXP (x, 1)))
1396 REG_ATTRS (XEXP (x, 1))
1397 = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
1399 if (GET_CODE (x) == PARALLEL)
1401 int i, start;
1403 /* Check for a NULL entry, used to indicate that the parameter goes
1404 both on the stack and in registers. */
1405 if (XEXP (XVECEXP (x, 0, 0), 0))
1406 start = 0;
1407 else
1408 start = 1;
1410 for (i = start; i < XVECLEN (x, 0); i++)
1412 rtx y = XVECEXP (x, 0, i);
1413 if (REG_P (XEXP (y, 0)))
1414 REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
1419 /* Assign the RTX X to declaration T. */
1421 void
1422 set_decl_rtl (tree t, rtx x)
1424 DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
1425 if (x)
1426 set_reg_attrs_for_decl_rtl (t, x);
1429 /* Assign the RTX X to parameter declaration T. BY_REFERENCE_P is true
1430 if the ABI requires the parameter to be passed by reference. */
1432 void
1433 set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
1435 DECL_INCOMING_RTL (t) = x;
1436 if (x && !by_reference_p)
1437 set_reg_attrs_for_decl_rtl (t, x);
1440 /* Identify REG (which may be a CONCAT) as a user register. */
1442 void
1443 mark_user_reg (rtx reg)
1445 if (GET_CODE (reg) == CONCAT)
1447 REG_USERVAR_P (XEXP (reg, 0)) = 1;
1448 REG_USERVAR_P (XEXP (reg, 1)) = 1;
1450 else
1452 gcc_assert (REG_P (reg));
1453 REG_USERVAR_P (reg) = 1;
1457 /* Identify REG as a probable pointer register and show its alignment
1458 as ALIGN, if nonzero. */
1460 void
1461 mark_reg_pointer (rtx reg, int align)
1463 if (! REG_POINTER (reg))
1465 REG_POINTER (reg) = 1;
1467 if (align)
1468 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1470 else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
1471 /* We can no longer be sure just how aligned this pointer is. */
1472 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1475 /* Return 1 plus largest pseudo reg number used in the current function. */
1478 max_reg_num (void)
1480 return reg_rtx_no;
1483 /* Return 1 + the largest label number used so far in the current function. */
1486 max_label_num (void)
1488 return label_num;
1491 /* Return first label number used in this function (if any were used). */
1494 get_first_label_num (void)
1496 return first_label_num;
1499 /* If the rtx for label was created during the expansion of a nested
1500 function, then first_label_num won't include this label number.
1501 Fix this now so that array indices work later. */
1503 void
1504 maybe_set_first_label_num (rtx_code_label *x)
1506 if (CODE_LABEL_NUMBER (x) < first_label_num)
1507 first_label_num = CODE_LABEL_NUMBER (x);
1510 /* For use by the RTL function loader, when mingling with normal
1511 functions.
1512 Ensure that label_num is greater than the label num of X, to avoid
1513 duplicate labels in the generated assembler. */
1515 void
1516 maybe_set_max_label_num (rtx_code_label *x)
1518 if (CODE_LABEL_NUMBER (x) >= label_num)
1519 label_num = CODE_LABEL_NUMBER (x) + 1;
1523 /* Return a value representing some low-order bits of X, where the number
1524 of low-order bits is given by MODE. Note that no conversion is done
1525 between floating-point and fixed-point values, rather, the bit
1526 representation is returned.
1528 This function handles the cases in common between gen_lowpart, below,
1529 and two variants in cse.c and combine.c. These are the cases that can
1530 be safely handled at all points in the compilation.
1532 If this is not a case we can handle, return 0. */
1535 gen_lowpart_common (machine_mode mode, rtx x)
1537 poly_uint64 msize = GET_MODE_SIZE (mode);
1538 machine_mode innermode;
1540 /* Unfortunately, this routine doesn't take a parameter for the mode of X,
1541 so we have to make one up. Yuk. */
1542 innermode = GET_MODE (x);
1543 if (CONST_INT_P (x)
1544 && known_le (msize * BITS_PER_UNIT,
1545 (unsigned HOST_WIDE_INT) HOST_BITS_PER_WIDE_INT))
1546 innermode = int_mode_for_size (HOST_BITS_PER_WIDE_INT, 0).require ();
1547 else if (innermode == VOIDmode)
1548 innermode = int_mode_for_size (HOST_BITS_PER_DOUBLE_INT, 0).require ();
1550 gcc_assert (innermode != VOIDmode && innermode != BLKmode);
1552 if (innermode == mode)
1553 return x;
1555 /* The size of the outer and inner modes must be ordered. */
1556 poly_uint64 xsize = GET_MODE_SIZE (innermode);
1557 if (!ordered_p (msize, xsize))
1558 return 0;
1560 if (SCALAR_FLOAT_MODE_P (mode))
1562 /* Don't allow paradoxical FLOAT_MODE subregs. */
1563 if (maybe_gt (msize, xsize))
1564 return 0;
1566 else
1568 /* MODE must occupy no more of the underlying registers than X. */
1569 poly_uint64 regsize = REGMODE_NATURAL_SIZE (innermode);
1570 unsigned int mregs, xregs;
1571 if (!can_div_away_from_zero_p (msize, regsize, &mregs)
1572 || !can_div_away_from_zero_p (xsize, regsize, &xregs)
1573 || mregs > xregs)
1574 return 0;
1577 scalar_int_mode int_mode, int_innermode, from_mode;
1578 if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
1579 && is_a <scalar_int_mode> (mode, &int_mode)
1580 && is_a <scalar_int_mode> (innermode, &int_innermode)
1581 && is_a <scalar_int_mode> (GET_MODE (XEXP (x, 0)), &from_mode))
1583 /* If we are getting the low-order part of something that has been
1584 sign- or zero-extended, we can either just use the object being
1585 extended or make a narrower extension. If we want an even smaller
1586 piece than the size of the object being extended, call ourselves
1587 recursively.
1589 This case is used mostly by combine and cse. */
1591 if (from_mode == int_mode)
1592 return XEXP (x, 0);
1593 else if (GET_MODE_SIZE (int_mode) < GET_MODE_SIZE (from_mode))
1594 return gen_lowpart_common (int_mode, XEXP (x, 0));
1595 else if (GET_MODE_SIZE (int_mode) < GET_MODE_SIZE (int_innermode))
1596 return gen_rtx_fmt_e (GET_CODE (x), int_mode, XEXP (x, 0));
1598 else if (GET_CODE (x) == SUBREG || REG_P (x)
1599 || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
1600 || CONST_DOUBLE_AS_FLOAT_P (x) || CONST_SCALAR_INT_P (x)
1601 || CONST_POLY_INT_P (x))
1602 return lowpart_subreg (mode, x, innermode);
1604 /* Otherwise, we can't do this. */
1605 return 0;
1609 gen_highpart (machine_mode mode, rtx x)
1611 poly_uint64 msize = GET_MODE_SIZE (mode);
1612 rtx result;
1614 /* This case loses if X is a subreg. To catch bugs early,
1615 complain if an invalid MODE is used even in other cases. */
1616 gcc_assert (known_le (msize, (unsigned int) UNITS_PER_WORD)
1617 || known_eq (msize, GET_MODE_UNIT_SIZE (GET_MODE (x))));
1619 result = simplify_gen_subreg (mode, x, GET_MODE (x),
1620 subreg_highpart_offset (mode, GET_MODE (x)));
1621 gcc_assert (result);
1623 /* simplify_gen_subreg is not guaranteed to return a valid operand for
1624 the target if we have a MEM. gen_highpart must return a valid operand,
1625 emitting code if necessary to do so. */
1626 if (MEM_P (result))
1628 result = validize_mem (result);
1629 gcc_assert (result);
1632 return result;
1635 /* Like gen_highpart, but accept the mode of EXP as a parameter, in case
1636 EXP is a VOIDmode constant. */
1638 gen_highpart_mode (machine_mode outermode, machine_mode innermode, rtx exp)
1640 if (GET_MODE (exp) != VOIDmode)
1642 gcc_assert (GET_MODE (exp) == innermode);
1643 return gen_highpart (outermode, exp);
1645 return simplify_gen_subreg (outermode, exp, innermode,
1646 subreg_highpart_offset (outermode, innermode));
1649 /* Return the SUBREG_BYTE for a lowpart subreg whose outer mode has
1650 OUTER_BYTES bytes and whose inner mode has INNER_BYTES bytes. */
1652 poly_uint64
1653 subreg_size_lowpart_offset (poly_uint64 outer_bytes, poly_uint64 inner_bytes)
1655 gcc_checking_assert (ordered_p (outer_bytes, inner_bytes));
1656 if (maybe_gt (outer_bytes, inner_bytes))
1657 /* Paradoxical subregs always have a SUBREG_BYTE of 0. */
1658 return 0;
1660 if (BYTES_BIG_ENDIAN && WORDS_BIG_ENDIAN)
1661 return inner_bytes - outer_bytes;
1662 else if (!BYTES_BIG_ENDIAN && !WORDS_BIG_ENDIAN)
1663 return 0;
1664 else
1665 return subreg_size_offset_from_lsb (outer_bytes, inner_bytes, 0);
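/* E.g. a 4-byte lowpart of an 8-byte value sits at offset 0 on a typical
   little-endian target and at offset 4 on a typical big-endian one.  */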
1668 /* Return the SUBREG_BYTE for a highpart subreg whose outer mode has
1669 OUTER_BYTES bytes and whose inner mode has INNER_BYTES bytes. */
1671 poly_uint64
1672 subreg_size_highpart_offset (poly_uint64 outer_bytes, poly_uint64 inner_bytes)
1674 gcc_assert (known_ge (inner_bytes, outer_bytes));
1676 if (BYTES_BIG_ENDIAN && WORDS_BIG_ENDIAN)
1677 return 0;
1678 else if (!BYTES_BIG_ENDIAN && !WORDS_BIG_ENDIAN)
1679 return inner_bytes - outer_bytes;
1680 else
1681 return subreg_size_offset_from_lsb (outer_bytes, inner_bytes,
1682 (inner_bytes - outer_bytes)
1683 * BITS_PER_UNIT);
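/* The mirror image of the lowpart case: a 4-byte highpart of an 8-byte
   value sits at offset 4 on a typical little-endian target and at offset 0
   on a typical big-endian one.  */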
1686 /* Return 1 iff X, assumed to be a SUBREG,
1687 refers to the least significant part of its containing reg.
1688 If X is not a SUBREG, always return 1 (it is its own low part!). */
1691 subreg_lowpart_p (const_rtx x)
1693 if (GET_CODE (x) != SUBREG)
1694 return 1;
1695 else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
1696 return 0;
1698 return known_eq (subreg_lowpart_offset (GET_MODE (x),
1699 GET_MODE (SUBREG_REG (x))),
1700 SUBREG_BYTE (x));
1703 /* Return subword OFFSET of operand OP.
1704 The word number, OFFSET, is interpreted as the word number starting
1705 at the low-order address. OFFSET 0 is the low-order word if not
1706 WORDS_BIG_ENDIAN, otherwise it is the high-order word.
1708 If we cannot extract the required word, we return zero. Otherwise,
1709 an rtx corresponding to the requested word will be returned.
1711 VALIDATE_ADDRESS is nonzero if the address should be validated. Before
1712 reload has completed, a valid address will always be returned. After
1713 reload, if a valid address cannot be returned, we return zero.
1715 If VALIDATE_ADDRESS is zero, we simply form the required address; validating
1716 it is the responsibility of the caller.
1718 MODE is the mode of OP in case it is a CONST_INT.
1720 ??? This is still rather broken for some cases. The problem for the
1721 moment is that all callers of this thing provide no 'goal mode' to
1722 tell us to work with. This exists because all callers were written
1723 in a word based SUBREG world.
1724 Now use of this function can be deprecated by simplify_subreg in most
1725 cases.
1729 operand_subword (rtx op, poly_uint64 offset, int validate_address,
1730 machine_mode mode)
1732 if (mode == VOIDmode)
1733 mode = GET_MODE (op);
1735 gcc_assert (mode != VOIDmode);
1737 /* If OP is narrower than a word, fail. */
1738 if (mode != BLKmode
1739 && maybe_lt (GET_MODE_SIZE (mode), UNITS_PER_WORD))
1740 return 0;
1742 /* If we want a word outside OP, return zero. */
1743 if (mode != BLKmode
1744 && maybe_gt ((offset + 1) * UNITS_PER_WORD, GET_MODE_SIZE (mode)))
1745 return const0_rtx;
1747 /* Form a new MEM at the requested address. */
1748 if (MEM_P (op))
1750 rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);
1752 if (! validate_address)
1753 return new_rtx;
1755 else if (reload_completed)
1757 if (! strict_memory_address_addr_space_p (word_mode,
1758 XEXP (new_rtx, 0),
1759 MEM_ADDR_SPACE (op)))
1760 return 0;
1762 else
1763 return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
1766 /* Rest can be handled by simplify_subreg. */
1767 return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
1770 /* Similar to `operand_subword', but never return 0. If we can't
1771 extract the required subword, put OP into a register and try again.
1772 The second attempt must succeed. We always validate the address in
1773 this case.
1775 MODE is the mode of OP, in case it is CONST_INT. */
1778 operand_subword_force (rtx op, poly_uint64 offset, machine_mode mode)
1780 rtx result = operand_subword (op, offset, 1, mode);
1782 if (result)
1783 return result;
1785 if (mode != BLKmode && mode != VOIDmode)
1787 /* If this is a register which cannot be accessed by words, copy it
1788 to a pseudo register. */
1789 if (REG_P (op))
1790 op = copy_to_reg (op);
1791 else
1792 op = force_reg (mode, op);
1795 result = operand_subword (op, offset, 1, mode);
1796 gcc_assert (result);
1798 return result;
1801 mem_attrs::mem_attrs ()
1802 : expr (NULL_TREE),
1803 offset (0),
1804 size (0),
1805 alias (0),
1806 align (0),
1807 addrspace (ADDR_SPACE_GENERIC),
1808 offset_known_p (false),
1809 size_known_p (false)
1812 /* Returns 1 if the two MEM_EXPRs can be considered equal
1813 and 0 otherwise. */
1816 mem_expr_equal_p (const_tree expr1, const_tree expr2)
1818 if (expr1 == expr2)
1819 return 1;
1821 if (! expr1 || ! expr2)
1822 return 0;
1824 if (TREE_CODE (expr1) != TREE_CODE (expr2))
1825 return 0;
1827 return operand_equal_p (expr1, expr2, 0);
1830 /* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
1831 bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
1832 -1 if not known. */
1835 get_mem_align_offset (rtx mem, unsigned int align)
1837 tree expr;
1838 poly_uint64 offset;
1840 /* This function can't use
1841 if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem)
1842 || (MAX (MEM_ALIGN (mem),
1843 MAX (align, get_object_alignment (MEM_EXPR (mem))))
1844 < align))
1845 return -1;
1846 else
1847 return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1);
1848 for two reasons:
1849 - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
1850 for <variable>. get_inner_reference doesn't handle it and
1851 even if it did, the alignment in that case needs to be determined
1852 from DECL_FIELD_CONTEXT's TYPE_ALIGN.
1853 - it would do suboptimal job for COMPONENT_REFs, even if MEM_EXPR
1854 isn't sufficiently aligned, the object it is in might be. */
1855 gcc_assert (MEM_P (mem));
1856 expr = MEM_EXPR (mem);
1857 if (expr == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
1858 return -1;
1860 offset = MEM_OFFSET (mem);
1861 if (DECL_P (expr))
1863 if (DECL_ALIGN (expr) < align)
1864 return -1;
1866 else if (INDIRECT_REF_P (expr))
1868 if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
1869 return -1;
1871 else if (TREE_CODE (expr) == COMPONENT_REF)
1873 while (1)
1875 tree inner = TREE_OPERAND (expr, 0);
1876 tree field = TREE_OPERAND (expr, 1);
1877 tree byte_offset = component_ref_field_offset (expr);
1878 tree bit_offset = DECL_FIELD_BIT_OFFSET (field);
1880 poly_uint64 suboffset;
1881 if (!byte_offset
1882 || !poly_int_tree_p (byte_offset, &suboffset)
1883 || !tree_fits_uhwi_p (bit_offset))
1884 return -1;
1886 offset += suboffset;
1887 offset += tree_to_uhwi (bit_offset) / BITS_PER_UNIT;
1889 if (inner == NULL_TREE)
1891 if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
1892 < (unsigned int) align)
1893 return -1;
1894 break;
1896 else if (DECL_P (inner))
1898 if (DECL_ALIGN (inner) < align)
1899 return -1;
1900 break;
1902 else if (TREE_CODE (inner) != COMPONENT_REF)
1903 return -1;
1904 expr = inner;
1907 else
1908 return -1;
1910 HOST_WIDE_INT misalign;
1911 if (!known_misalignment (offset, align / BITS_PER_UNIT, &misalign))
1912 return -1;
1913 return misalign;
1916 /* Given REF (a MEM) and T, either the type of X or the expression
1917 corresponding to REF, set the memory attributes. OBJECTP is nonzero
1918 if we are making a new object of this type. BITPOS is nonzero if
1919 there is an offset outstanding on T that will be applied later. */
1921 void
1922 set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
1923 poly_int64 bitpos)
1925 poly_int64 apply_bitpos = 0;
1926 tree type;
1927 class mem_attrs attrs, *defattrs, *refattrs;
1928 addr_space_t as;
1930 /* It can happen that type_for_mode was given a mode for which there
1931 is no language-level type, in which case it returns NULL; we can
1932 see that here. */
1933 if (t == NULL_TREE)
1934 return;
1936 type = TYPE_P (t) ? t : TREE_TYPE (t);
1937 if (type == error_mark_node)
1938 return;
1940 /* If we have already set DECL_RTL = ref, get_alias_set will get the
1941 wrong answer, as it assumes that DECL_RTL already has the right alias
1942 info. Callers should not set DECL_RTL until after the call to
1943 set_mem_attributes. */
1944 gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));
1946 /* Get the alias set from the expression or type (perhaps using a
1947 front-end routine) and use it. */
1948 attrs.alias = get_alias_set (t);
1950 MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
1951 MEM_POINTER (ref) = POINTER_TYPE_P (type);
1953 /* Default values from pre-existing memory attributes if present. */
1954 refattrs = MEM_ATTRS (ref);
1955 if (refattrs)
1957 /* ??? Can this ever happen? Calling this routine on a MEM that
1958 already carries memory attributes should probably be invalid. */
1959 attrs.expr = refattrs->expr;
1960 attrs.offset_known_p = refattrs->offset_known_p;
1961 attrs.offset = refattrs->offset;
1962 attrs.size_known_p = refattrs->size_known_p;
1963 attrs.size = refattrs->size;
1964 attrs.align = refattrs->align;
1967 /* Otherwise, default values from the mode of the MEM reference. */
1968 else
1970 defattrs = mode_mem_attrs[(int) GET_MODE (ref)];
1971 gcc_assert (!defattrs->expr);
1972 gcc_assert (!defattrs->offset_known_p);
1974 /* Respect mode size. */
1975 attrs.size_known_p = defattrs->size_known_p;
1976 attrs.size = defattrs->size;
1977 /* ??? Is this really necessary? We probably should always get
1978 the size from the type below. */
1980 /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
1981 if T is an object, always compute the object alignment below. */
1982 if (TYPE_P (t))
1983 attrs.align = defattrs->align;
1984 else
1985 attrs.align = BITS_PER_UNIT;
1986 /* ??? If T is a type, respecting mode alignment may *also* be wrong
1987 e.g. if the type carries an alignment attribute. Should we be
1988 able to simply always use TYPE_ALIGN? */
1991 /* We can set the alignment from the type if we are making an object or if
1992 this is an INDIRECT_REF. */
1993 if (objectp || TREE_CODE (t) == INDIRECT_REF)
1994 attrs.align = MAX (attrs.align, TYPE_ALIGN (type));
1996 /* If the size is known, we can set that. */
1997 tree new_size = TYPE_SIZE_UNIT (type);
1999 /* The address-space is that of the type. */
2000 as = TYPE_ADDR_SPACE (type);
2002 /* If T is not a type, we may be able to deduce some more information about
2003 the expression. */
2004 if (! TYPE_P (t))
2006 tree base;
2008 if (TREE_THIS_VOLATILE (t))
2009 MEM_VOLATILE_P (ref) = 1;
2011 /* Now remove any conversions: they don't change what the underlying
2012 object is. Likewise for SAVE_EXPR. */
2013 while (CONVERT_EXPR_P (t)
2014 || TREE_CODE (t) == VIEW_CONVERT_EXPR
2015 || TREE_CODE (t) == SAVE_EXPR)
2016 t = TREE_OPERAND (t, 0);
2018 /* Note whether this expression can trap. */
2019 MEM_NOTRAP_P (ref) = !tree_could_trap_p (t);
2021 base = get_base_address (t);
2022 if (base)
2024 if (DECL_P (base)
2025 && TREE_READONLY (base)
2026 && (TREE_STATIC (base) || DECL_EXTERNAL (base))
2027 && !TREE_THIS_VOLATILE (base))
2028 MEM_READONLY_P (ref) = 1;
2030 /* Mark static const strings readonly as well. */
2031 if (TREE_CODE (base) == STRING_CST
2032 && TREE_READONLY (base)
2033 && TREE_STATIC (base))
2034 MEM_READONLY_P (ref) = 1;
2036 /* Address-space information is on the base object. */
2037 if (TREE_CODE (base) == MEM_REF
2038 || TREE_CODE (base) == TARGET_MEM_REF)
2039 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (base,
2040 0))));
2041 else
2042 as = TYPE_ADDR_SPACE (TREE_TYPE (base));
2045 /* If this expression uses its parent's alias set, mark it such
2046 that we won't change it. */
2047 if (component_uses_parent_alias_set_from (t) != NULL_TREE)
2048 MEM_KEEP_ALIAS_SET_P (ref) = 1;
2050 /* If this is a decl, set the attributes of the MEM from it. */
2051 if (DECL_P (t))
2053 attrs.expr = t;
2054 attrs.offset_known_p = true;
2055 attrs.offset = 0;
2056 apply_bitpos = bitpos;
2057 new_size = DECL_SIZE_UNIT (t);
2060 /* ??? If we end up with a constant here do record a MEM_EXPR. */
2061 else if (CONSTANT_CLASS_P (t))
2064 /* If this is a field reference, record it. */
2065 else if (TREE_CODE (t) == COMPONENT_REF)
2067 attrs.expr = t;
2068 attrs.offset_known_p = true;
2069 attrs.offset = 0;
2070 apply_bitpos = bitpos;
2071 if (DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
2072 new_size = DECL_SIZE_UNIT (TREE_OPERAND (t, 1));
2075 /* If this is an array reference, look for an outer field reference. */
2076 else if (TREE_CODE (t) == ARRAY_REF)
2078 tree off_tree = size_zero_node;
2079 /* We can't modify t, because we use it at the end of the
2080 function. */
2081 tree t2 = t;
2085 tree index = TREE_OPERAND (t2, 1);
2086 tree low_bound = array_ref_low_bound (t2);
2087 tree unit_size = array_ref_element_size (t2);
2089 /* We assume all arrays have sizes that are a multiple of a byte.
2090 First subtract the lower bound, if any, in the type of the
2091 index, then convert to sizetype and multiply by the size of
2092 the array element. */
2093 if (! integer_zerop (low_bound))
2094 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
2095 index, low_bound);
2097 off_tree = size_binop (PLUS_EXPR,
2098 size_binop (MULT_EXPR,
2099 fold_convert (sizetype,
2100 index),
2101 unit_size),
2102 off_tree);
2103 t2 = TREE_OPERAND (t2, 0);
2105 while (TREE_CODE (t2) == ARRAY_REF);
2107 if (DECL_P (t2)
2108 || (TREE_CODE (t2) == COMPONENT_REF
2109 /* For trailing arrays t2 doesn't have a size that
2110 covers all valid accesses. */
2111 && ! array_at_struct_end_p (t)))
2113 attrs.expr = t2;
2114 attrs.offset_known_p = false;
2115 if (poly_int_tree_p (off_tree, &attrs.offset))
2117 attrs.offset_known_p = true;
2118 apply_bitpos = bitpos;
2121 /* Else do not record a MEM_EXPR. */
2124 /* If this is an indirect reference, record it. */
2125 else if (TREE_CODE (t) == MEM_REF
2126 || TREE_CODE (t) == TARGET_MEM_REF)
2128 attrs.expr = t;
2129 attrs.offset_known_p = true;
2130 attrs.offset = 0;
2131 apply_bitpos = bitpos;
2134 /* If this is a reference based on a partitioned decl replace the
2135 base with a MEM_REF of the pointer representative we created
2136 during stack slot partitioning. */
2137 if (attrs.expr
2138 && VAR_P (base)
2139 && ! is_global_var (base)
2140 && cfun->gimple_df->decls_to_pointers != NULL)
2142 tree *namep = cfun->gimple_df->decls_to_pointers->get (base);
2143 if (namep)
2145 attrs.expr = unshare_expr (attrs.expr);
2146 tree *orig_base = &attrs.expr;
2147 while (handled_component_p (*orig_base))
2148 orig_base = &TREE_OPERAND (*orig_base, 0);
2149 tree aptrt = reference_alias_ptr_type (*orig_base);
2150 *orig_base = build2 (MEM_REF, TREE_TYPE (*orig_base), *namep,
2151 build_int_cst (aptrt, 0));
2155 /* Compute the alignment. */
2156 unsigned int obj_align;
2157 unsigned HOST_WIDE_INT obj_bitpos;
2158 get_object_alignment_1 (t, &obj_align, &obj_bitpos);
2159 unsigned int diff_align = known_alignment (obj_bitpos - bitpos);
2160 if (diff_align != 0)
2161 obj_align = MIN (obj_align, diff_align);
2162 attrs.align = MAX (attrs.align, obj_align);
2165 poly_uint64 const_size;
2166 if (poly_int_tree_p (new_size, &const_size))
2168 attrs.size_known_p = true;
2169 attrs.size = const_size;
2172 /* If we modified OFFSET based on T, then subtract the outstanding
2173 bit position offset. Similarly, increase the size of the accessed
2174 object to contain the negative offset. */
2175 if (maybe_ne (apply_bitpos, 0))
2177 gcc_assert (attrs.offset_known_p);
2178 poly_int64 bytepos = bits_to_bytes_round_down (apply_bitpos);
2179 attrs.offset -= bytepos;
2180 if (attrs.size_known_p)
2181 attrs.size += bytepos;
2184 /* Now set the attributes we computed above. */
2185 attrs.addrspace = as;
2186 set_mem_attrs (ref, &attrs);
2189 void
2190 set_mem_attributes (rtx ref, tree t, int objectp)
2192 set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
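
A minimal sketch of the typical calling pattern, under the assumption that the caller has just created a fresh stack slot for a declaration; the helper and its name are illustrative, not taken from the middle end.

/* Hypothetical expander helper: give DECL a stack slot and record the
   tree-level attributes (expr, offset, size, alignment, alias set) on
   the resulting MEM.  */
static rtx
make_stack_mem_for_decl (tree decl)
{
  machine_mode mode = TYPE_MODE (TREE_TYPE (decl));
  rtx slot = assign_stack_local (mode, GET_MODE_SIZE (mode),
                                 TYPE_ALIGN (TREE_TYPE (decl)));
  /* OBJECTP is 1 because the MEM denotes the object itself rather
     than some access of its type.  */
  set_mem_attributes (slot, decl, 1);
  return slot;
}
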
2195 /* Set the alias set of MEM to SET. */
2197 void
2198 set_mem_alias_set (rtx mem, alias_set_type set)
2200 /* If the new and old alias sets don't conflict, something is wrong. */
2201 gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
2202 mem_attrs attrs (*get_mem_attrs (mem));
2203 attrs.alias = set;
2204 set_mem_attrs (mem, &attrs);
2207 /* Set the address space of MEM to ADDRSPACE (target-defined). */
2209 void
2210 set_mem_addr_space (rtx mem, addr_space_t addrspace)
2212 mem_attrs attrs (*get_mem_attrs (mem));
2213 attrs.addrspace = addrspace;
2214 set_mem_attrs (mem, &attrs);
2217 /* Set the alignment of MEM to ALIGN bits. */
2219 void
2220 set_mem_align (rtx mem, unsigned int align)
2222 mem_attrs attrs (*get_mem_attrs (mem));
2223 attrs.align = align;
2224 set_mem_attrs (mem, &attrs);
2227 /* Set the expr for MEM to EXPR. */
2229 void
2230 set_mem_expr (rtx mem, tree expr)
2232 mem_attrs attrs (*get_mem_attrs (mem));
2233 attrs.expr = expr;
2234 set_mem_attrs (mem, &attrs);
2237 /* Set the offset of MEM to OFFSET. */
2239 void
2240 set_mem_offset (rtx mem, poly_int64 offset)
2242 mem_attrs attrs (*get_mem_attrs (mem));
2243 attrs.offset_known_p = true;
2244 attrs.offset = offset;
2245 set_mem_attrs (mem, &attrs);
2248 /* Clear the offset of MEM. */
2250 void
2251 clear_mem_offset (rtx mem)
2253 mem_attrs attrs (*get_mem_attrs (mem));
2254 attrs.offset_known_p = false;
2255 set_mem_attrs (mem, &attrs);
2258 /* Set the size of MEM to SIZE. */
2260 void
2261 set_mem_size (rtx mem, poly_int64 size)
2263 mem_attrs attrs (*get_mem_attrs (mem));
2264 attrs.size_known_p = true;
2265 attrs.size = size;
2266 set_mem_attrs (mem, &attrs);
2269 /* Clear the size of MEM. */
2271 void
2272 clear_mem_size (rtx mem)
2274 mem_attrs attrs (*get_mem_attrs (mem));
2275 attrs.size_known_p = false;
2276 set_mem_attrs (mem, &attrs);
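
All of the setters above share the same copy-modify-commit idiom: copy the current mem_attrs, change one field, and install the copy with set_mem_attrs. A hypothetical combined setter, shown only to illustrate the pattern, would have the same shape.

/* Hypothetical: update both the alignment and the alias set of MEM with
   a single attribute rebuild, mirroring the individual setters above.  */
static void
set_mem_align_and_alias (rtx mem, unsigned int align, alias_set_type set)
{
  mem_attrs attrs (*get_mem_attrs (mem));
  attrs.align = align;
  attrs.alias = set;
  set_mem_attrs (mem, &attrs);
}
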
2279 /* Return a memory reference like MEMREF, but with its mode changed to MODE
2280 and its address changed to ADDR. (VOIDmode means don't change the mode.
2281 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
2282 returned memory location is required to be valid. INPLACE is true if any
2283 changes can be made directly to MEMREF or false if MEMREF must be treated
2284 as immutable.
2286 The memory attributes are not changed. */
2288 static rtx
2289 change_address_1 (rtx memref, machine_mode mode, rtx addr, int validate,
2290 bool inplace)
2292 addr_space_t as;
2293 rtx new_rtx;
2295 gcc_assert (MEM_P (memref));
2296 as = MEM_ADDR_SPACE (memref);
2297 if (mode == VOIDmode)
2298 mode = GET_MODE (memref);
2299 if (addr == 0)
2300 addr = XEXP (memref, 0);
2301 if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
2302 && (!validate || memory_address_addr_space_p (mode, addr, as)))
2303 return memref;
2305 /* Don't validate the address for LRA. LRA can make the address valid
2306 by itself in the most efficient way. */
2307 if (validate && !lra_in_progress)
2309 if (reload_in_progress || reload_completed)
2310 gcc_assert (memory_address_addr_space_p (mode, addr, as));
2311 else
2312 addr = memory_address_addr_space (mode, addr, as);
2315 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
2316 return memref;
2318 if (inplace)
2320 XEXP (memref, 0) = addr;
2321 return memref;
2324 new_rtx = gen_rtx_MEM (mode, addr);
2325 MEM_COPY_ATTRIBUTES (new_rtx, memref);
2326 return new_rtx;
2329 /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
2330 way we are changing MEMREF, so we only preserve the alias set. */
2333 change_address (rtx memref, machine_mode mode, rtx addr)
2335 rtx new_rtx = change_address_1 (memref, mode, addr, 1, false);
2336 machine_mode mmode = GET_MODE (new_rtx);
2337 class mem_attrs *defattrs;
2339 mem_attrs attrs (*get_mem_attrs (memref));
2340 defattrs = mode_mem_attrs[(int) mmode];
2341 attrs.expr = NULL_TREE;
2342 attrs.offset_known_p = false;
2343 attrs.size_known_p = defattrs->size_known_p;
2344 attrs.size = defattrs->size;
2345 attrs.align = defattrs->align;
2347 /* If there are no changes, just return the original memory reference. */
2348 if (new_rtx == memref)
2350 if (mem_attrs_eq_p (get_mem_attrs (memref), &attrs))
2351 return new_rtx;
2353 new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
2354 MEM_COPY_ATTRIBUTES (new_rtx, memref);
2357 set_mem_attrs (new_rtx, &attrs);
2358 return new_rtx;
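
For example, an expander that wants to re-access MEMREF word by word through an address register might use change_address roughly as below; only the alias set carries over, as the comment above explains. The helper is a sketch, not an existing API.

/* Hypothetical: return a word_mode view of MEMREF through a register
   address.  Expr, offset and size are intentionally dropped because the
   access no longer matches the original expression.  */
static rtx
word_view_of (rtx memref)
{
  rtx addr = force_reg (Pmode, XEXP (memref, 0));
  return change_address (memref, word_mode, addr);
}
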
2361 /* Return a memory reference like MEMREF, but with its mode changed
2362 to MODE and its address offset by OFFSET bytes. If VALIDATE is
2363 nonzero, the memory address is forced to be valid.
2364 If ADJUST_ADDRESS is zero, OFFSET is only used to update MEM_ATTRS
2365 and the caller is responsible for adjusting MEMREF base register.
2366 If ADJUST_OBJECT is zero, the underlying object associated with the
2367 memory reference is left unchanged and the caller is responsible for
2368 dealing with it. Otherwise, if the new memory reference is outside
2369 the underlying object, even partially, then the object is dropped.
2370 SIZE, if nonzero, is the size of an access in cases where MODE
2371 has no inherent size. */
2374 adjust_address_1 (rtx memref, machine_mode mode, poly_int64 offset,
2375 int validate, int adjust_address, int adjust_object,
2376 poly_int64 size)
2378 rtx addr = XEXP (memref, 0);
2379 rtx new_rtx;
2380 scalar_int_mode address_mode;
2381 class mem_attrs attrs (*get_mem_attrs (memref)), *defattrs;
2382 unsigned HOST_WIDE_INT max_align;
2383 #ifdef POINTERS_EXTEND_UNSIGNED
2384 scalar_int_mode pointer_mode
2385 = targetm.addr_space.pointer_mode (attrs.addrspace);
2386 #endif
2388 /* VOIDmode means no mode change for change_address_1. */
2389 if (mode == VOIDmode)
2390 mode = GET_MODE (memref);
2392 /* Take the size of non-BLKmode accesses from the mode. */
2393 defattrs = mode_mem_attrs[(int) mode];
2394 if (defattrs->size_known_p)
2395 size = defattrs->size;
2397 /* If there are no changes, just return the original memory reference. */
2398 if (mode == GET_MODE (memref)
2399 && known_eq (offset, 0)
2400 && (known_eq (size, 0)
2401 || (attrs.size_known_p && known_eq (attrs.size, size)))
2402 && (!validate || memory_address_addr_space_p (mode, addr,
2403 attrs.addrspace)))
2404 return memref;
2406 /* ??? Prefer to create garbage instead of creating shared rtl.
2407 This may happen even if offset is nonzero -- consider
2408 (plus (plus reg reg) const_int) -- so do this always. */
2409 addr = copy_rtx (addr);
2411 /* Convert a possibly large offset to a signed value within the
2412 range of the target address space. */
2413 address_mode = get_address_mode (memref);
2414 offset = trunc_int_for_mode (offset, address_mode);
2416 if (adjust_address)
2418 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
2419 object, we can merge it into the LO_SUM. */
2420 if (GET_MODE (memref) != BLKmode
2421 && GET_CODE (addr) == LO_SUM
2422 && known_in_range_p (offset,
2423 0, (GET_MODE_ALIGNMENT (GET_MODE (memref))
2424 / BITS_PER_UNIT)))
2425 addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
2426 plus_constant (address_mode,
2427 XEXP (addr, 1), offset));
2428 #ifdef POINTERS_EXTEND_UNSIGNED
2429 /* If MEMREF is a ZERO_EXTEND from pointer_mode and the offset is valid
2430 in that mode, we merge it into the ZERO_EXTEND. We take advantage of
2431 the fact that pointers are not allowed to overflow. */
2432 else if (POINTERS_EXTEND_UNSIGNED > 0
2433 && GET_CODE (addr) == ZERO_EXTEND
2434 && GET_MODE (XEXP (addr, 0)) == pointer_mode
2435 && known_eq (trunc_int_for_mode (offset, pointer_mode), offset))
2436 addr = gen_rtx_ZERO_EXTEND (address_mode,
2437 plus_constant (pointer_mode,
2438 XEXP (addr, 0), offset));
2439 #endif
2440 else
2441 addr = plus_constant (address_mode, addr, offset);
2444 new_rtx = change_address_1 (memref, mode, addr, validate, false);
2446 /* If the address is a REG, change_address_1 rightfully returns memref,
2447 but this would destroy memref's MEM_ATTRS. */
2448 if (new_rtx == memref && maybe_ne (offset, 0))
2449 new_rtx = copy_rtx (new_rtx);
2451 /* Conservatively drop the object if we don't know where we start from. */
2452 if (adjust_object && (!attrs.offset_known_p || !attrs.size_known_p))
2454 attrs.expr = NULL_TREE;
2455 attrs.alias = 0;
2458 /* Compute the new values of the memory attributes due to this adjustment.
2459 We add the offsets and update the alignment. */
2460 if (attrs.offset_known_p)
2462 attrs.offset += offset;
2464 /* Drop the object if the new left end is not within its bounds. */
2465 if (adjust_object && maybe_lt (attrs.offset, 0))
2467 attrs.expr = NULL_TREE;
2468 attrs.alias = 0;
2472 /* Compute the new alignment by taking the MIN of the alignment and the
2473 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
2474 is zero. */
2475 if (maybe_ne (offset, 0))
2477 max_align = known_alignment (offset) * BITS_PER_UNIT;
2478 attrs.align = MIN (attrs.align, max_align);
2481 if (maybe_ne (size, 0))
2483 /* Drop the object if the new right end is not within its bounds. */
2484 if (adjust_object && maybe_gt (offset + size, attrs.size))
2486 attrs.expr = NULL_TREE;
2487 attrs.alias = 0;
2489 attrs.size_known_p = true;
2490 attrs.size = size;
2492 else if (attrs.size_known_p)
2494 gcc_assert (!adjust_object);
2495 attrs.size -= offset;
2496 /* ??? The store_by_pieces machinery generates negative sizes,
2497 so don't assert for that here. */
2500 set_mem_attrs (new_rtx, &attrs);
2502 return new_rtx;
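
Callers normally go through the adjust_address / adjust_address_nv macros, which pass the appropriate VALIDATE and ADJUST_ADDRESS flags to adjust_address_1. A sketch of the common "split a double-word MEM into halves" pattern follows; word order is assumed little-endian purely for illustration.

/* Hypothetical: return the two SImode halves of a DImode MEM,
   low part first.  */
static void
split_di_mem (rtx mem, rtx *lo, rtx *hi)
{
  gcc_assert (MEM_P (mem) && GET_MODE (mem) == DImode);
  *lo = adjust_address (mem, SImode, 0);
  *hi = adjust_address (mem, SImode, GET_MODE_SIZE (SImode));
}
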
2505 /* Return a memory reference like MEMREF, but with its mode changed
2506 to MODE and its address changed to ADDR, which is assumed to be
2507 MEMREF offset by OFFSET bytes. If VALIDATE is
2508 nonzero, the memory address is forced to be valid. */
2511 adjust_automodify_address_1 (rtx memref, machine_mode mode, rtx addr,
2512 poly_int64 offset, int validate)
2514 memref = change_address_1 (memref, VOIDmode, addr, validate, false);
2515 return adjust_address_1 (memref, mode, offset, validate, 0, 0, 0);
2518 /* Return a memory reference like MEMREF, but whose address is changed by
2519 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2520 known to be in OFFSET (possibly 1). */
2523 offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
2525 rtx new_rtx, addr = XEXP (memref, 0);
2526 machine_mode address_mode;
2527 class mem_attrs *defattrs;
2529 mem_attrs attrs (*get_mem_attrs (memref));
2530 address_mode = get_address_mode (memref);
2531 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2533 /* At this point we don't know _why_ the address is invalid. It
2534 could have secondary memory references, multiplies or anything.
2536 However, if we did go and rearrange things, we can wind up not
2537 being able to recognize the magic around pic_offset_table_rtx.
2538 This stuff is fragile, and is yet another example of why it is
2539 bad to expose PIC machinery too early. */
2540 if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx,
2541 attrs.addrspace)
2542 && GET_CODE (addr) == PLUS
2543 && XEXP (addr, 0) == pic_offset_table_rtx)
2545 addr = force_reg (GET_MODE (addr), addr);
2546 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2549 update_temp_slot_address (XEXP (memref, 0), new_rtx);
2550 new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1, false);
2552 /* If there are no changes, just return the original memory reference. */
2553 if (new_rtx == memref)
2554 return new_rtx;
2556 /* Update the alignment to reflect the offset. Reset the offset, which
2557 we don't know. */
2558 defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
2559 attrs.offset_known_p = false;
2560 attrs.size_known_p = defattrs->size_known_p;
2561 attrs.size = defattrs->size;
2562 attrs.align = MIN (attrs.align, pow2 * BITS_PER_UNIT);
2563 set_mem_attrs (new_rtx, &attrs);
2564 return new_rtx;
2567 /* Return a memory reference like MEMREF, but with its address changed to
2568 ADDR. The caller is asserting that the actual piece of memory pointed
2569 to is the same, just the form of the address is being changed, such as
2570 by putting something into a register. INPLACE is true if any changes
2571 can be made directly to MEMREF or false if MEMREF must be treated as
2572 immutable. */
2575 replace_equiv_address (rtx memref, rtx addr, bool inplace)
2577 /* change_address_1 copies the memory attribute structure without change
2578 and that's exactly what we want here. */
2579 update_temp_slot_address (XEXP (memref, 0), addr);
2580 return change_address_1 (memref, VOIDmode, addr, 1, inplace);
2583 /* Likewise, but the reference is not required to be valid. */
2586 replace_equiv_address_nv (rtx memref, rtx addr, bool inplace)
2588 return change_address_1 (memref, VOIDmode, addr, 0, inplace);
2591 /* Return a memory reference like MEMREF, but with its mode widened to
2592 MODE and offset by OFFSET. This would be used by targets that e.g.
2593 cannot issue QImode memory operations and have to use SImode memory
2594 operations plus masking logic. */
2597 widen_memory_access (rtx memref, machine_mode mode, poly_int64 offset)
2599 rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1, 0, 0);
2600 poly_uint64 size = GET_MODE_SIZE (mode);
2602 /* If there are no changes, just return the original memory reference. */
2603 if (new_rtx == memref)
2604 return new_rtx;
2606 mem_attrs attrs (*get_mem_attrs (new_rtx));
2608 /* If we don't know what offset we were at within the expression, then
2609 we can't know if we've overstepped the bounds. */
2610 if (! attrs.offset_known_p)
2611 attrs.expr = NULL_TREE;
2613 while (attrs.expr)
2615 if (TREE_CODE (attrs.expr) == COMPONENT_REF)
2617 tree field = TREE_OPERAND (attrs.expr, 1);
2618 tree offset = component_ref_field_offset (attrs.expr);
2620 if (! DECL_SIZE_UNIT (field))
2622 attrs.expr = NULL_TREE;
2623 break;
2626 /* Is the field at least as large as the access? If so, ok,
2627 otherwise strip back to the containing structure. */
2628 if (poly_int_tree_p (DECL_SIZE_UNIT (field))
2629 && known_ge (wi::to_poly_offset (DECL_SIZE_UNIT (field)), size)
2630 && known_ge (attrs.offset, 0))
2631 break;
2633 poly_uint64 suboffset;
2634 if (!poly_int_tree_p (offset, &suboffset))
2636 attrs.expr = NULL_TREE;
2637 break;
2640 attrs.expr = TREE_OPERAND (attrs.expr, 0);
2641 attrs.offset += suboffset;
2642 attrs.offset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
2643 / BITS_PER_UNIT);
2645 /* Similarly for the decl. */
2646 else if (DECL_P (attrs.expr)
2647 && DECL_SIZE_UNIT (attrs.expr)
2648 && poly_int_tree_p (DECL_SIZE_UNIT (attrs.expr))
2649 && known_ge (wi::to_poly_offset (DECL_SIZE_UNIT (attrs.expr)),
2650 size)
2651 && known_ge (attrs.offset, 0))
2652 break;
2653 else
2655 /* The widened memory access overflows the expression, which means
2656 that it could alias another expression. Zap it. */
2657 attrs.expr = NULL_TREE;
2658 break;
2662 if (! attrs.expr)
2663 attrs.offset_known_p = false;
2665 /* The widened memory may alias other stuff, so zap the alias set. */
2666 /* ??? Maybe use get_alias_set on any remaining expression. */
2667 attrs.alias = 0;
2668 attrs.size_known_p = true;
2669 attrs.size = size;
2670 set_mem_attrs (new_rtx, &attrs);
2671 return new_rtx;
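
A rough sketch of the QImode-via-SImode scenario mentioned in the comment above. The expander below is hypothetical and ignores the alignment and endianness corner cases a real target would have to handle.

/* Hypothetical: load a byte by reading the containing SImode word and
   taking its low part.  */
static rtx
expand_byte_load_via_si (rtx byte_mem)
{
  rtx wide = widen_memory_access (byte_mem, SImode, 0);
  rtx tmp = gen_reg_rtx (SImode);
  emit_move_insn (tmp, wide);
  return gen_lowpart (QImode, tmp);
}
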
2674 /* A fake decl that is used as the MEM_EXPR of spill slots. */
2675 static GTY(()) tree spill_slot_decl;
2677 tree
2678 get_spill_slot_decl (bool force_build_p)
2680 tree d = spill_slot_decl;
2681 rtx rd;
2683 if (d || !force_build_p)
2684 return d;
2686 d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
2687 VAR_DECL, get_identifier ("%sfp"), void_type_node);
2688 DECL_ARTIFICIAL (d) = 1;
2689 DECL_IGNORED_P (d) = 1;
2690 TREE_USED (d) = 1;
2691 spill_slot_decl = d;
2693 rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
2694 MEM_NOTRAP_P (rd) = 1;
2695 mem_attrs attrs (*mode_mem_attrs[(int) BLKmode]);
2696 attrs.alias = new_alias_set ();
2697 attrs.expr = d;
2698 set_mem_attrs (rd, &attrs);
2699 SET_DECL_RTL (d, rd);
2701 return d;
2704 /* Given MEM, a result from assign_stack_local, fill in the memory
2705 attributes as appropriate for a register allocator spill slot.
2706 These slots are not aliasable by other memory. We arrange for
2707 them all to use a single MEM_EXPR, so that the aliasing code can
2708 work properly in the case of shared spill slots. */
2710 void
2711 set_mem_attrs_for_spill (rtx mem)
2713 rtx addr;
2715 mem_attrs attrs (*get_mem_attrs (mem));
2716 attrs.expr = get_spill_slot_decl (true);
2717 attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr));
2718 attrs.addrspace = ADDR_SPACE_GENERIC;
2720 /* We expect the incoming memory to be of the form:
2721 (mem:MODE (plus (reg sfp) (const_int offset)))
2722 with perhaps the plus missing for offset = 0. */
2723 addr = XEXP (mem, 0);
2724 attrs.offset_known_p = true;
2725 strip_offset (addr, &attrs.offset);
2727 set_mem_attrs (mem, &attrs);
2728 MEM_NOTRAP_P (mem) = 1;
2731 /* Return a newly created CODE_LABEL rtx with a unique label number. */
2733 rtx_code_label *
2734 gen_label_rtx (void)
2736 return as_a <rtx_code_label *> (
2737 gen_rtx_CODE_LABEL (VOIDmode, NULL_RTX, NULL_RTX,
2738 NULL, label_num++, NULL));
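
A minimal sketch of how expanders typically use such labels: allocate one with gen_label_rtx, branch to it, and emit it later with emit_label. The helper is hypothetical and the guarded body is elided.

/* Hypothetical: skip the guarded code when VAL is zero.  */
static void
emit_guarded_block (rtx val)
{
  rtx_code_label *skip = gen_label_rtx ();
  emit_cmp_and_jump_insns (val, const0_rtx, EQ, NULL_RTX,
                           GET_MODE (val), 0, skip);
  /* ... emit the guarded insns here ...  */
  emit_label (skip);
}
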
2741 /* For procedure integration. */
2743 /* Install new pointers to the first and last insns in the chain.
2744 Also, set cur_insn_uid to one higher than the last in use.
2745 Used for an inline-procedure after copying the insn chain. */
2747 void
2748 set_new_first_and_last_insn (rtx_insn *first, rtx_insn *last)
2750 rtx_insn *insn;
2752 set_first_insn (first);
2753 set_last_insn (last);
2754 cur_insn_uid = 0;
2756 if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS)
2758 int debug_count = 0;
2760 cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1;
2761 cur_debug_insn_uid = 0;
2763 for (insn = first; insn; insn = NEXT_INSN (insn))
2764 if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID)
2765 cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
2766 else
2768 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2769 if (DEBUG_INSN_P (insn))
2770 debug_count++;
2773 if (debug_count)
2774 cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count;
2775 else
2776 cur_debug_insn_uid++;
2778 else
2779 for (insn = first; insn; insn = NEXT_INSN (insn))
2780 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2782 cur_insn_uid++;
2785 /* Go through all the RTL insn bodies and copy any invalid shared
2786 structure. This routine should only be called once. */
2788 static void
2789 unshare_all_rtl_1 (rtx_insn *insn)
2791 /* Unshare just about everything else. */
2792 unshare_all_rtl_in_chain (insn);
2794 /* Make sure the addresses of stack slots found outside the insn chain
2795 (such as, in DECL_RTL of a variable) are not shared
2796 with the insn chain.
2798 This special care is necessary when the stack slot MEM does not
2799 actually appear in the insn chain. If it does appear, its address
2800 is unshared from all else at that point. */
2801 unsigned int i;
2802 rtx temp;
2803 FOR_EACH_VEC_SAFE_ELT (stack_slot_list, i, temp)
2804 (*stack_slot_list)[i] = copy_rtx_if_shared (temp);
2807 /* Go through all the RTL insn bodies and copy any invalid shared
2808 structure, again. This is a fairly expensive thing to do so it
2809 should be done sparingly. */
2811 void
2812 unshare_all_rtl_again (rtx_insn *insn)
2814 rtx_insn *p;
2815 tree decl;
2817 for (p = insn; p; p = NEXT_INSN (p))
2818 if (INSN_P (p))
2820 reset_used_flags (PATTERN (p));
2821 reset_used_flags (REG_NOTES (p));
2822 if (CALL_P (p))
2823 reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
2826 /* Make sure that virtual stack slots are not shared. */
2827 set_used_decls (DECL_INITIAL (cfun->decl));
2829 /* Make sure that virtual parameters are not shared. */
2830 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
2831 set_used_flags (DECL_RTL (decl));
2833 rtx temp;
2834 unsigned int i;
2835 FOR_EACH_VEC_SAFE_ELT (stack_slot_list, i, temp)
2836 reset_used_flags (temp);
2838 unshare_all_rtl_1 (insn);
2841 unsigned int
2842 unshare_all_rtl (void)
2844 unshare_all_rtl_1 (get_insns ());
2846 for (tree decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
2848 if (DECL_RTL_SET_P (decl))
2849 SET_DECL_RTL (decl, copy_rtx_if_shared (DECL_RTL (decl)));
2850 DECL_INCOMING_RTL (decl) = copy_rtx_if_shared (DECL_INCOMING_RTL (decl));
2853 return 0;
2857 /* Check that ORIG is not marked when it should not be and mark ORIG as in use.
2858 Recursively do the same for subexpressions. */
2860 static void
2861 verify_rtx_sharing (rtx orig, rtx insn)
2863 rtx x = orig;
2864 int i;
2865 enum rtx_code code;
2866 const char *format_ptr;
2868 if (x == 0)
2869 return;
2871 code = GET_CODE (x);
2873 /* These types may be freely shared. */
2875 switch (code)
2877 case REG:
2878 case DEBUG_EXPR:
2879 case VALUE:
2880 CASE_CONST_ANY:
2881 case SYMBOL_REF:
2882 case LABEL_REF:
2883 case CODE_LABEL:
2884 case PC:
2885 case CC0:
2886 case RETURN:
2887 case SIMPLE_RETURN:
2888 case SCRATCH:
2889 /* SCRATCHes must be shared because they represent distinct values. */
2890 return;
2891 case CLOBBER:
2892 case CLOBBER_HIGH:
2893 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
2894 clobbers or clobbers of hard registers that originated as pseudos.
2895 This is needed to allow safe register renaming. */
2896 if (REG_P (XEXP (x, 0))
2897 && HARD_REGISTER_NUM_P (REGNO (XEXP (x, 0)))
2898 && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (x, 0))))
2899 return;
2900 break;
2902 case CONST:
2903 if (shared_const_p (orig))
2904 return;
2905 break;
2907 case MEM:
2908 /* A MEM is allowed to be shared if its address is constant. */
2909 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2910 || reload_completed || reload_in_progress)
2911 return;
2913 break;
2915 default:
2916 break;
2919 /* This rtx may not be shared. If it has already been seen,
2920 report the invalid sharing. */
2921 if (flag_checking && RTX_FLAG (x, used))
2923 error ("invalid rtl sharing found in the insn");
2924 debug_rtx (insn);
2925 error ("shared rtx");
2926 debug_rtx (x);
2927 internal_error ("internal consistency failure");
2929 gcc_assert (!RTX_FLAG (x, used));
2931 RTX_FLAG (x, used) = 1;
2933 /* Now scan the subexpressions recursively. */
2935 format_ptr = GET_RTX_FORMAT (code);
2937 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2939 switch (*format_ptr++)
2941 case 'e':
2942 verify_rtx_sharing (XEXP (x, i), insn);
2943 break;
2945 case 'E':
2946 if (XVEC (x, i) != NULL)
2948 int j;
2949 int len = XVECLEN (x, i);
2951 for (j = 0; j < len; j++)
2953 /* We allow sharing of ASM_OPERANDS inside a single
2954 instruction. */
2955 if (j && GET_CODE (XVECEXP (x, i, j)) == SET
2956 && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2957 == ASM_OPERANDS))
2958 verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2959 else
2960 verify_rtx_sharing (XVECEXP (x, i, j), insn);
2963 break;
2966 return;
2969 /* Reset used-flags for INSN. */
2971 static void
2972 reset_insn_used_flags (rtx insn)
2974 gcc_assert (INSN_P (insn));
2975 reset_used_flags (PATTERN (insn));
2976 reset_used_flags (REG_NOTES (insn));
2977 if (CALL_P (insn))
2978 reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn));
2981 /* Go through all the RTL insn bodies and clear all the USED bits. */
2983 static void
2984 reset_all_used_flags (void)
2986 rtx_insn *p;
2988 for (p = get_insns (); p; p = NEXT_INSN (p))
2989 if (INSN_P (p))
2991 rtx pat = PATTERN (p);
2992 if (GET_CODE (pat) != SEQUENCE)
2993 reset_insn_used_flags (p);
2994 else
2996 gcc_assert (REG_NOTES (p) == NULL);
2997 for (int i = 0; i < XVECLEN (pat, 0); i++)
2999 rtx insn = XVECEXP (pat, 0, i);
3000 if (INSN_P (insn))
3001 reset_insn_used_flags (insn);
3007 /* Verify sharing in INSN. */
3009 static void
3010 verify_insn_sharing (rtx insn)
3012 gcc_assert (INSN_P (insn));
3013 verify_rtx_sharing (PATTERN (insn), insn);
3014 verify_rtx_sharing (REG_NOTES (insn), insn);
3015 if (CALL_P (insn))
3016 verify_rtx_sharing (CALL_INSN_FUNCTION_USAGE (insn), insn);
3019 /* Go through all the RTL insn bodies and check that there is no unexpected
3020 sharing in between the subexpressions. */
3022 DEBUG_FUNCTION void
3023 verify_rtl_sharing (void)
3025 rtx_insn *p;
3027 timevar_push (TV_VERIFY_RTL_SHARING);
3029 reset_all_used_flags ();
3031 for (p = get_insns (); p; p = NEXT_INSN (p))
3032 if (INSN_P (p))
3034 rtx pat = PATTERN (p);
3035 if (GET_CODE (pat) != SEQUENCE)
3036 verify_insn_sharing (p);
3037 else
3038 for (int i = 0; i < XVECLEN (pat, 0); i++)
3040 rtx insn = XVECEXP (pat, 0, i);
3041 if (INSN_P (insn))
3042 verify_insn_sharing (insn);
3046 reset_all_used_flags ();
3048 timevar_pop (TV_VERIFY_RTL_SHARING);
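
A hedged sketch of the usual calling convention for a pass that copies patterns into the insn stream: unshare whatever it created and, with checking enabled, re-verify. The pass and its name are hypothetical.

/* Hypothetical pass epilogue.  */
static void
finish_rewrite_pass (void)
{
  /* The rewriting may have duplicated rtxes into several insns.  */
  unshare_all_rtl_in_chain (get_insns ());
  if (flag_checking)
    verify_rtl_sharing ();
}
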
3051 /* Go through all the RTL insn bodies and copy any invalid shared structure.
3052 Assumes the mark bits are cleared at entry. */
3054 void
3055 unshare_all_rtl_in_chain (rtx_insn *insn)
3057 for (; insn; insn = NEXT_INSN (insn))
3058 if (INSN_P (insn))
3060 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
3061 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
3062 if (CALL_P (insn))
3063 CALL_INSN_FUNCTION_USAGE (insn)
3064 = copy_rtx_if_shared (CALL_INSN_FUNCTION_USAGE (insn));
3068 /* Go through all virtual stack slots of a function and mark them as
3069 shared. We never replace the DECL_RTLs themselves with a copy,
3070 but expressions mentioned in a DECL_RTL cannot be shared with
3071 expressions in the instruction stream.
3073 Note that reload may convert pseudo registers into memories in-place.
3074 Pseudo registers are always shared, but MEMs never are. Thus if we
3075 reset the used flags on MEMs in the instruction stream, we must set
3076 them again on MEMs that appear in DECL_RTLs. */
3078 static void
3079 set_used_decls (tree blk)
3081 tree t;
3083 /* Mark decls. */
3084 for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
3085 if (DECL_RTL_SET_P (t))
3086 set_used_flags (DECL_RTL (t));
3088 /* Now process sub-blocks. */
3089 for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
3090 set_used_decls (t);
3093 /* Mark ORIG as in use, and return a copy of it if it was already in use.
3094 Recursively does the same for subexpressions. Uses
3095 copy_rtx_if_shared_1 to reduce stack space. */
3098 copy_rtx_if_shared (rtx orig)
3100 copy_rtx_if_shared_1 (&orig);
3101 return orig;
3104 /* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
3105 use. Recursively does the same for subexpressions. */
3107 static void
3108 copy_rtx_if_shared_1 (rtx *orig1)
3110 rtx x;
3111 int i;
3112 enum rtx_code code;
3113 rtx *last_ptr;
3114 const char *format_ptr;
3115 int copied = 0;
3116 int length;
3118 /* Repeat is used to turn tail-recursion into iteration. */
3119 repeat:
3120 x = *orig1;
3122 if (x == 0)
3123 return;
3125 code = GET_CODE (x);
3127 /* These types may be freely shared. */
3129 switch (code)
3131 case REG:
3132 case DEBUG_EXPR:
3133 case VALUE:
3134 CASE_CONST_ANY:
3135 case SYMBOL_REF:
3136 case LABEL_REF:
3137 case CODE_LABEL:
3138 case PC:
3139 case CC0:
3140 case RETURN:
3141 case SIMPLE_RETURN:
3142 case SCRATCH:
3143 /* SCRATCHes must be shared because they represent distinct values. */
3144 return;
3145 case CLOBBER:
3146 case CLOBBER_HIGH:
3147 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
3148 clobbers or clobbers of hard registers that originated as pseudos.
3149 This is needed to allow safe register renaming. */
3150 if (REG_P (XEXP (x, 0))
3151 && HARD_REGISTER_NUM_P (REGNO (XEXP (x, 0)))
3152 && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (x, 0))))
3153 return;
3154 break;
3156 case CONST:
3157 if (shared_const_p (x))
3158 return;
3159 break;
3161 case DEBUG_INSN:
3162 case INSN:
3163 case JUMP_INSN:
3164 case CALL_INSN:
3165 case NOTE:
3166 case BARRIER:
3167 /* The chain of insns is not being copied. */
3168 return;
3170 default:
3171 break;
3174 /* This rtx may not be shared. If it has already been seen,
3175 replace it with a copy of itself. */
3177 if (RTX_FLAG (x, used))
3179 x = shallow_copy_rtx (x);
3180 copied = 1;
3182 RTX_FLAG (x, used) = 1;
3184 /* Now scan the subexpressions recursively.
3185 We can store any replaced subexpressions directly into X
3186 since we know X is not shared! Any vectors in X
3187 must be copied if X was copied. */
3189 format_ptr = GET_RTX_FORMAT (code);
3190 length = GET_RTX_LENGTH (code);
3191 last_ptr = NULL;
3193 for (i = 0; i < length; i++)
3195 switch (*format_ptr++)
3197 case 'e':
3198 if (last_ptr)
3199 copy_rtx_if_shared_1 (last_ptr);
3200 last_ptr = &XEXP (x, i);
3201 break;
3203 case 'E':
3204 if (XVEC (x, i) != NULL)
3206 int j;
3207 int len = XVECLEN (x, i);
3209 /* Copy the vector iff I copied the rtx and the length
3210 is nonzero. */
3211 if (copied && len > 0)
3212 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
3214 /* Call recursively on all inside the vector. */
3215 for (j = 0; j < len; j++)
3217 if (last_ptr)
3218 copy_rtx_if_shared_1 (last_ptr);
3219 last_ptr = &XVECEXP (x, i, j);
3222 break;
3225 *orig1 = x;
3226 if (last_ptr)
3228 orig1 = last_ptr;
3229 goto repeat;
3231 return;
3234 /* Set the USED bit in X and its non-shareable subparts to FLAG. */
3236 static void
3237 mark_used_flags (rtx x, int flag)
3239 int i, j;
3240 enum rtx_code code;
3241 const char *format_ptr;
3242 int length;
3244 /* Repeat is used to turn tail-recursion into iteration. */
3245 repeat:
3246 if (x == 0)
3247 return;
3249 code = GET_CODE (x);
3251 /* These types may be freely shared so we needn't do any resetting
3252 for them. */
3254 switch (code)
3256 case REG:
3257 case DEBUG_EXPR:
3258 case VALUE:
3259 CASE_CONST_ANY:
3260 case SYMBOL_REF:
3261 case CODE_LABEL:
3262 case PC:
3263 case CC0:
3264 case RETURN:
3265 case SIMPLE_RETURN:
3266 return;
3268 case DEBUG_INSN:
3269 case INSN:
3270 case JUMP_INSN:
3271 case CALL_INSN:
3272 case NOTE:
3273 case LABEL_REF:
3274 case BARRIER:
3275 /* The chain of insns is not being copied. */
3276 return;
3278 default:
3279 break;
3282 RTX_FLAG (x, used) = flag;
3284 format_ptr = GET_RTX_FORMAT (code);
3285 length = GET_RTX_LENGTH (code);
3287 for (i = 0; i < length; i++)
3289 switch (*format_ptr++)
3291 case 'e':
3292 if (i == length-1)
3294 x = XEXP (x, i);
3295 goto repeat;
3297 mark_used_flags (XEXP (x, i), flag);
3298 break;
3300 case 'E':
3301 for (j = 0; j < XVECLEN (x, i); j++)
3302 mark_used_flags (XVECEXP (x, i, j), flag);
3303 break;
3308 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
3309 to look for shared sub-parts. */
3311 void
3312 reset_used_flags (rtx x)
3314 mark_used_flags (x, 0);
3317 /* Set all the USED bits in X to allow copy_rtx_if_shared to be used
3318 to look for shared sub-parts. */
3320 void
3321 set_used_flags (rtx x)
3323 mark_used_flags (x, 1);
3326 /* Copy X if necessary so that it won't be altered by changes in OTHER.
3327 Return X or the rtx for the pseudo reg the value of X was copied into.
3328 OTHER must be valid as a SET_DEST. */
3331 make_safe_from (rtx x, rtx other)
3333 while (1)
3334 switch (GET_CODE (other))
3336 case SUBREG:
3337 other = SUBREG_REG (other);
3338 break;
3339 case STRICT_LOW_PART:
3340 case SIGN_EXTEND:
3341 case ZERO_EXTEND:
3342 other = XEXP (other, 0);
3343 break;
3344 default:
3345 goto done;
3347 done:
3348 if ((MEM_P (other)
3349 && ! CONSTANT_P (x)
3350 && !REG_P (x)
3351 && GET_CODE (x) != SUBREG)
3352 || (REG_P (other)
3353 && (REGNO (other) < FIRST_PSEUDO_REGISTER
3354 || reg_mentioned_p (other, x))))
3356 rtx temp = gen_reg_rtx (GET_MODE (x));
3357 emit_move_insn (temp, x);
3358 return temp;
3360 return x;
3363 /* Emission of insns (adding them to the doubly-linked list). */
3365 /* Return the last insn emitted, even if it is in a sequence now pushed. */
3367 rtx_insn *
3368 get_last_insn_anywhere (void)
3370 struct sequence_stack *seq;
3371 for (seq = get_current_sequence (); seq; seq = seq->next)
3372 if (seq->last != 0)
3373 return seq->last;
3374 return 0;
3377 /* Return the first nonnote insn emitted in current sequence or current
3378 function. This routine looks inside SEQUENCEs. */
3380 rtx_insn *
3381 get_first_nonnote_insn (void)
3383 rtx_insn *insn = get_insns ();
3385 if (insn)
3387 if (NOTE_P (insn))
3388 for (insn = next_insn (insn);
3389 insn && NOTE_P (insn);
3390 insn = next_insn (insn))
3391 continue;
3392 else
3394 if (NONJUMP_INSN_P (insn)
3395 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3396 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3400 return insn;
3403 /* Return the last nonnote insn emitted in current sequence or current
3404 function. This routine looks inside SEQUENCEs. */
3406 rtx_insn *
3407 get_last_nonnote_insn (void)
3409 rtx_insn *insn = get_last_insn ();
3411 if (insn)
3413 if (NOTE_P (insn))
3414 for (insn = previous_insn (insn);
3415 insn && NOTE_P (insn);
3416 insn = previous_insn (insn))
3417 continue;
3418 else
3420 if (NONJUMP_INSN_P (insn))
3421 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
3422 insn = seq->insn (seq->len () - 1);
3426 return insn;
3429 /* Return the number of actual (non-debug) insns emitted in this
3430 function. */
3433 get_max_insn_count (void)
3435 int n = cur_insn_uid;
3437 /* The table size must be stable across -g, to avoid codegen
3438 differences due to debug insns, and not be affected by
3439 -fmin-insn-uid, to avoid excessive table size and to simplify
3440 debugging of -fcompare-debug failures. */
3441 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3442 n -= cur_debug_insn_uid;
3443 else
3444 n -= MIN_NONDEBUG_INSN_UID;
3446 return n;
3450 /* Return the next insn. If it is a SEQUENCE, return the first insn
3451 of the sequence. */
3453 rtx_insn *
3454 next_insn (rtx_insn *insn)
3456 if (insn)
3458 insn = NEXT_INSN (insn);
3459 if (insn && NONJUMP_INSN_P (insn)
3460 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3461 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3464 return insn;
3467 /* Return the previous insn. If it is a SEQUENCE, return the last insn
3468 of the sequence. */
3470 rtx_insn *
3471 previous_insn (rtx_insn *insn)
3473 if (insn)
3475 insn = PREV_INSN (insn);
3476 if (insn && NONJUMP_INSN_P (insn))
3477 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
3478 insn = seq->insn (seq->len () - 1);
3481 return insn;
3484 /* Return the next insn after INSN that is not a NOTE. This routine does not
3485 look inside SEQUENCEs. */
3487 rtx_insn *
3488 next_nonnote_insn (rtx_insn *insn)
3490 while (insn)
3492 insn = NEXT_INSN (insn);
3493 if (insn == 0 || !NOTE_P (insn))
3494 break;
3497 return insn;
3500 /* Return the next insn after INSN that is not a DEBUG_INSN. This
3501 routine does not look inside SEQUENCEs. */
3503 rtx_insn *
3504 next_nondebug_insn (rtx_insn *insn)
3506 while (insn)
3508 insn = NEXT_INSN (insn);
3509 if (insn == 0 || !DEBUG_INSN_P (insn))
3510 break;
3513 return insn;
3516 /* Return the previous insn before INSN that is not a NOTE. This routine does
3517 not look inside SEQUENCEs. */
3519 rtx_insn *
3520 prev_nonnote_insn (rtx_insn *insn)
3522 while (insn)
3524 insn = PREV_INSN (insn);
3525 if (insn == 0 || !NOTE_P (insn))
3526 break;
3529 return insn;
3532 /* Return the previous insn before INSN that is not a DEBUG_INSN.
3533 This routine does not look inside SEQUENCEs. */
3535 rtx_insn *
3536 prev_nondebug_insn (rtx_insn *insn)
3538 while (insn)
3540 insn = PREV_INSN (insn);
3541 if (insn == 0 || !DEBUG_INSN_P (insn))
3542 break;
3545 return insn;
3548 /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
3549 This routine does not look inside SEQUENCEs. */
3551 rtx_insn *
3552 next_nonnote_nondebug_insn (rtx_insn *insn)
3554 while (insn)
3556 insn = NEXT_INSN (insn);
3557 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3558 break;
3561 return insn;
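
These walkers are what passes use to iterate over "real" instructions while ignoring notes and debug insns; a small, self-contained sketch (the helper is hypothetical):

/* Hypothetical: count the non-note, non-debug insns of the current
   function.  */
static int
count_real_insns (void)
{
  int n = 0;
  rtx_insn *insn = get_insns ();
  if (insn && (NOTE_P (insn) || DEBUG_INSN_P (insn)))
    insn = next_nonnote_nondebug_insn (insn);
  for (; insn; insn = next_nonnote_nondebug_insn (insn))
    n++;
  return n;
}
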
3564 /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN,
3565 but stop the search before we enter another basic block. This
3566 routine does not look inside SEQUENCEs. */
3568 rtx_insn *
3569 next_nonnote_nondebug_insn_bb (rtx_insn *insn)
3571 while (insn)
3573 insn = NEXT_INSN (insn);
3574 if (insn == 0)
3575 break;
3576 if (DEBUG_INSN_P (insn))
3577 continue;
3578 if (!NOTE_P (insn))
3579 break;
3580 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3581 return NULL;
3584 return insn;
3587 /* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
3588 This routine does not look inside SEQUENCEs. */
3590 rtx_insn *
3591 prev_nonnote_nondebug_insn (rtx_insn *insn)
3593 while (insn)
3595 insn = PREV_INSN (insn);
3596 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3597 break;
3600 return insn;
3603 /* Return the previous insn before INSN that is not a NOTE nor
3604 DEBUG_INSN, but stop the search before we enter another basic
3605 block. This routine does not look inside SEQUENCEs. */
3607 rtx_insn *
3608 prev_nonnote_nondebug_insn_bb (rtx_insn *insn)
3610 while (insn)
3612 insn = PREV_INSN (insn);
3613 if (insn == 0)
3614 break;
3615 if (DEBUG_INSN_P (insn))
3616 continue;
3617 if (!NOTE_P (insn))
3618 break;
3619 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3620 return NULL;
3623 return insn;
3626 /* Return the next INSN, CALL_INSN, JUMP_INSN or DEBUG_INSN after INSN;
3627 or 0, if there is none. This routine does not look inside
3628 SEQUENCEs. */
3630 rtx_insn *
3631 next_real_insn (rtx_insn *insn)
3633 while (insn)
3635 insn = NEXT_INSN (insn);
3636 if (insn == 0 || INSN_P (insn))
3637 break;
3640 return insn;
3643 /* Return the last INSN, CALL_INSN, JUMP_INSN or DEBUG_INSN before INSN;
3644 or 0, if there is none. This routine does not look inside
3645 SEQUENCEs. */
3647 rtx_insn *
3648 prev_real_insn (rtx_insn *insn)
3650 while (insn)
3652 insn = PREV_INSN (insn);
3653 if (insn == 0 || INSN_P (insn))
3654 break;
3657 return insn;
3660 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3661 or 0, if there is none. This routine does not look inside
3662 SEQUENCEs. */
3664 rtx_insn *
3665 next_real_nondebug_insn (rtx uncast_insn)
3667 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3669 while (insn)
3671 insn = NEXT_INSN (insn);
3672 if (insn == 0 || NONDEBUG_INSN_P (insn))
3673 break;
3676 return insn;
3679 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3680 or 0, if there is none. This routine does not look inside
3681 SEQUENCEs. */
3683 rtx_insn *
3684 prev_real_nondebug_insn (rtx_insn *insn)
3686 while (insn)
3688 insn = PREV_INSN (insn);
3689 if (insn == 0 || NONDEBUG_INSN_P (insn))
3690 break;
3693 return insn;
3696 /* Return the last CALL_INSN in the current list, or 0 if there is none.
3697 This routine does not look inside SEQUENCEs. */
3699 rtx_call_insn *
3700 last_call_insn (void)
3702 rtx_insn *insn;
3704 for (insn = get_last_insn ();
3705 insn && !CALL_P (insn);
3706 insn = PREV_INSN (insn))
3709 return safe_as_a <rtx_call_insn *> (insn);
3712 /* Return nonzero if INSN is an insn that really does something; after
3713 reload, standalone USE and CLOBBER insns do not count. next_active_insn
3714 below finds the next such insn and does not look inside SEQUENCEs. */
3717 active_insn_p (const rtx_insn *insn)
3719 return (CALL_P (insn) || JUMP_P (insn)
3720 || JUMP_TABLE_DATA_P (insn) /* FIXME */
3721 || (NONJUMP_INSN_P (insn)
3722 && (! reload_completed
3723 || (GET_CODE (PATTERN (insn)) != USE
3724 && GET_CODE (PATTERN (insn)) != CLOBBER))));
3727 rtx_insn *
3728 next_active_insn (rtx_insn *insn)
3730 while (insn)
3732 insn = NEXT_INSN (insn);
3733 if (insn == 0 || active_insn_p (insn))
3734 break;
3737 return insn;
3740 /* Find the last insn before INSN that really does something. This routine
3741 does not look inside SEQUENCEs. After reload this also skips over
3742 standalone USE and CLOBBER insns. */
3744 rtx_insn *
3745 prev_active_insn (rtx_insn *insn)
3747 while (insn)
3749 insn = PREV_INSN (insn);
3750 if (insn == 0 || active_insn_p (insn))
3751 break;
3754 return insn;
3757 /* Return the next insn that uses CC0 after INSN, which is assumed to
3758 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3759 applied to the result of this function should yield INSN).
3761 Normally, this is simply the next insn. However, if a REG_CC_USER note
3762 is present, it contains the insn that uses CC0.
3764 Return 0 if we can't find the insn. */
3766 rtx_insn *
3767 next_cc0_user (rtx_insn *insn)
3769 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3771 if (note)
3772 return safe_as_a <rtx_insn *> (XEXP (note, 0));
3774 insn = next_nonnote_insn (insn);
3775 if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3776 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3778 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3779 return insn;
3781 return 0;
3784 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3785 note, it is the previous insn. */
3787 rtx_insn *
3788 prev_cc0_setter (rtx_insn *insn)
3790 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3792 if (note)
3793 return safe_as_a <rtx_insn *> (XEXP (note, 0));
3795 insn = prev_nonnote_insn (insn);
3796 gcc_assert (sets_cc0_p (PATTERN (insn)));
3798 return insn;
3801 /* Return nonzero if X contains an RTX_AUTOINC rtx whose operand is REG. */
3803 static int
3804 find_auto_inc (const_rtx x, const_rtx reg)
3806 subrtx_iterator::array_type array;
3807 FOR_EACH_SUBRTX (iter, array, x, NONCONST)
3809 const_rtx x = *iter;
3810 if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC
3811 && rtx_equal_p (reg, XEXP (x, 0)))
3812 return true;
3814 return false;
3817 /* Increment the label use counts for all labels present in X. */
3819 static void
3820 mark_label_nuses (rtx x)
3822 enum rtx_code code;
3823 int i, j;
3824 const char *fmt;
3826 code = GET_CODE (x);
3827 if (code == LABEL_REF && LABEL_P (label_ref_label (x)))
3828 LABEL_NUSES (label_ref_label (x))++;
3830 fmt = GET_RTX_FORMAT (code);
3831 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3833 if (fmt[i] == 'e')
3834 mark_label_nuses (XEXP (x, i));
3835 else if (fmt[i] == 'E')
3836 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3837 mark_label_nuses (XVECEXP (x, i, j));
3842 /* Try splitting insns that can be split for better scheduling.
3843 PAT is the pattern which might be split.
3844 TRIAL is the insn providing PAT.
3845 LAST is nonzero if we should return the last insn of the sequence produced.
3847 If this routine succeeds in splitting, it returns the first or last
3848 replacement insn depending on the value of LAST. Otherwise, it
3849 returns TRIAL. If the insn to be returned can be split, it will be. */
3851 rtx_insn *
3852 try_split (rtx pat, rtx_insn *trial, int last)
3854 rtx_insn *before, *after;
3855 rtx note;
3856 rtx_insn *seq, *tem;
3857 profile_probability probability;
3858 rtx_insn *insn_last, *insn;
3859 int njumps = 0;
3860 rtx_insn *call_insn = NULL;
3862 /* We're not good at redistributing frame information. */
3863 if (RTX_FRAME_RELATED_P (trial))
3864 return trial;
3866 if (any_condjump_p (trial)
3867 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3868 split_branch_probability
3869 = profile_probability::from_reg_br_prob_note (XINT (note, 0));
3870 else
3871 split_branch_probability = profile_probability::uninitialized ();
3873 probability = split_branch_probability;
3875 seq = split_insns (pat, trial);
3877 split_branch_probability = profile_probability::uninitialized ();
3879 if (!seq)
3880 return trial;
3882 /* Avoid an infinite loop if any insn of the result matches
3883 the original pattern. */
3884 insn_last = seq;
3885 while (1)
3887 if (INSN_P (insn_last)
3888 && rtx_equal_p (PATTERN (insn_last), pat))
3889 return trial;
3890 if (!NEXT_INSN (insn_last))
3891 break;
3892 insn_last = NEXT_INSN (insn_last);
3895 /* We will be adding the new sequence to the function. The splitters
3896 may have introduced invalid RTL sharing, so unshare the sequence now. */
3897 unshare_all_rtl_in_chain (seq);
3899 /* Mark labels and copy flags. */
3900 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3902 if (JUMP_P (insn))
3904 if (JUMP_P (trial))
3905 CROSSING_JUMP_P (insn) = CROSSING_JUMP_P (trial);
3906 mark_jump_label (PATTERN (insn), insn, 0);
3907 njumps++;
3908 if (probability.initialized_p ()
3909 && any_condjump_p (insn)
3910 && !find_reg_note (insn, REG_BR_PROB, 0))
3912 /* We can preserve the REG_BR_PROB notes only if exactly
3913 one jump is created, otherwise the machine description
3914 is responsible for this step using
3915 the split_branch_probability variable. */
3916 gcc_assert (njumps == 1);
3917 add_reg_br_prob_note (insn, probability);
3922 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3923 in SEQ and copy any additional information across. */
3924 if (CALL_P (trial))
3926 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3927 if (CALL_P (insn))
3929 gcc_assert (call_insn == NULL_RTX);
3930 call_insn = insn;
3932 /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the
3933 target may have explicitly specified. */
3934 rtx *p = &CALL_INSN_FUNCTION_USAGE (insn);
3935 while (*p)
3936 p = &XEXP (*p, 1);
3937 *p = CALL_INSN_FUNCTION_USAGE (trial);
3939 /* If the old call was a sibling call, the new one must
3940 be too. */
3941 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
3945 /* Copy notes, particularly those related to the CFG. */
3946 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3948 switch (REG_NOTE_KIND (note))
3950 case REG_EH_REGION:
3951 copy_reg_eh_region_note_backward (note, insn_last, NULL);
3952 break;
3954 case REG_NORETURN:
3955 case REG_SETJMP:
3956 case REG_TM:
3957 case REG_CALL_NOCF_CHECK:
3958 case REG_CALL_ARG_LOCATION:
3959 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3961 if (CALL_P (insn))
3962 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3964 break;
3966 case REG_NON_LOCAL_GOTO:
3967 case REG_LABEL_TARGET:
3968 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3970 if (JUMP_P (insn))
3971 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3973 break;
3975 case REG_INC:
3976 if (!AUTO_INC_DEC)
3977 break;
3979 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3981 rtx reg = XEXP (note, 0);
3982 if (!FIND_REG_INC_NOTE (insn, reg)
3983 && find_auto_inc (PATTERN (insn), reg))
3984 add_reg_note (insn, REG_INC, reg);
3986 break;
3988 case REG_ARGS_SIZE:
3989 fixup_args_size_notes (NULL, insn_last, get_args_size (note));
3990 break;
3992 case REG_CALL_DECL:
3993 gcc_assert (call_insn != NULL_RTX);
3994 add_reg_note (call_insn, REG_NOTE_KIND (note), XEXP (note, 0));
3995 break;
3997 default:
3998 break;
4002 /* If there are LABELS inside the split insns increment the
4003 usage count so we don't delete the label. */
4004 if (INSN_P (trial))
4006 insn = insn_last;
4007 while (insn != NULL_RTX)
4009 /* JUMP_P insns have already been "marked" above. */
4010 if (NONJUMP_INSN_P (insn))
4011 mark_label_nuses (PATTERN (insn));
4013 insn = PREV_INSN (insn);
4017 before = PREV_INSN (trial);
4018 after = NEXT_INSN (trial);
4020 emit_insn_after_setloc (seq, trial, INSN_LOCATION (trial));
4022 delete_insn (trial);
4024 /* Recursively call try_split for each new insn created; by the
4025 time control returns here that insn will be fully split, so
4026 set LAST and continue from the insn after the one returned.
4027 We can't use next_active_insn here since AFTER may be a note.
4028 Ignore deleted insns, which can occur if not optimizing. */
4029 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
4030 if (! tem->deleted () && INSN_P (tem))
4031 tem = try_split (PATTERN (tem), tem, 1);
4033 /* Return either the first or the last insn, depending on which was
4034 requested. */
4035 return last
4036 ? (after ? PREV_INSN (after) : get_last_insn ())
4037 : NEXT_INSN (before);
4040 /* Make and return an INSN rtx, initializing all its slots.
4041 Store PATTERN in the pattern slot. */
4043 rtx_insn *
4044 make_insn_raw (rtx pattern)
4046 rtx_insn *insn;
4048 insn = as_a <rtx_insn *> (rtx_alloc (INSN));
4050 INSN_UID (insn) = cur_insn_uid++;
4051 PATTERN (insn) = pattern;
4052 INSN_CODE (insn) = -1;
4053 REG_NOTES (insn) = NULL;
4054 INSN_LOCATION (insn) = curr_insn_location ();
4055 BLOCK_FOR_INSN (insn) = NULL;
4057 #ifdef ENABLE_RTL_CHECKING
4058 if (insn
4059 && INSN_P (insn)
4060 && (returnjump_p (insn)
4061 || (GET_CODE (insn) == SET
4062 && SET_DEST (insn) == pc_rtx)))
4064 warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
4065 debug_rtx (insn);
4067 #endif
4069 return insn;
4072 /* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn. */
4074 static rtx_insn *
4075 make_debug_insn_raw (rtx pattern)
4077 rtx_debug_insn *insn;
4079 insn = as_a <rtx_debug_insn *> (rtx_alloc (DEBUG_INSN));
4080 INSN_UID (insn) = cur_debug_insn_uid++;
4081 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
4082 INSN_UID (insn) = cur_insn_uid++;
4084 PATTERN (insn) = pattern;
4085 INSN_CODE (insn) = -1;
4086 REG_NOTES (insn) = NULL;
4087 INSN_LOCATION (insn) = curr_insn_location ();
4088 BLOCK_FOR_INSN (insn) = NULL;
4090 return insn;
4093 /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
4095 static rtx_insn *
4096 make_jump_insn_raw (rtx pattern)
4098 rtx_jump_insn *insn;
4100 insn = as_a <rtx_jump_insn *> (rtx_alloc (JUMP_INSN));
4101 INSN_UID (insn) = cur_insn_uid++;
4103 PATTERN (insn) = pattern;
4104 INSN_CODE (insn) = -1;
4105 REG_NOTES (insn) = NULL;
4106 JUMP_LABEL (insn) = NULL;
4107 INSN_LOCATION (insn) = curr_insn_location ();
4108 BLOCK_FOR_INSN (insn) = NULL;
4110 return insn;
4113 /* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
4115 static rtx_insn *
4116 make_call_insn_raw (rtx pattern)
4118 rtx_call_insn *insn;
4120 insn = as_a <rtx_call_insn *> (rtx_alloc (CALL_INSN));
4121 INSN_UID (insn) = cur_insn_uid++;
4123 PATTERN (insn) = pattern;
4124 INSN_CODE (insn) = -1;
4125 REG_NOTES (insn) = NULL;
4126 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
4127 INSN_LOCATION (insn) = curr_insn_location ();
4128 BLOCK_FOR_INSN (insn) = NULL;
4130 return insn;
4133 /* Like `make_insn_raw' but make a NOTE instead of an insn. */
4135 static rtx_note *
4136 make_note_raw (enum insn_note subtype)
4138 /* Some notes are never created this way at all. These notes are
4139 only created by patching out insns. */
4140 gcc_assert (subtype != NOTE_INSN_DELETED_LABEL
4141 && subtype != NOTE_INSN_DELETED_DEBUG_LABEL);
4143 rtx_note *note = as_a <rtx_note *> (rtx_alloc (NOTE));
4144 INSN_UID (note) = cur_insn_uid++;
4145 NOTE_KIND (note) = subtype;
4146 BLOCK_FOR_INSN (note) = NULL;
4147 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
4148 return note;
4151 /* Add INSN to the end of the doubly-linked list, between PREV and NEXT.
4152 INSN may be any object that can appear in the chain: INSN_P and NOTE_P objects,
4153 but also BARRIERs and JUMP_TABLE_DATAs. PREV and NEXT may be NULL. */
4155 static inline void
4156 link_insn_into_chain (rtx_insn *insn, rtx_insn *prev, rtx_insn *next)
4158 SET_PREV_INSN (insn) = prev;
4159 SET_NEXT_INSN (insn) = next;
4160 if (prev != NULL)
4162 SET_NEXT_INSN (prev) = insn;
4163 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
4165 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
4166 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = insn;
4169 if (next != NULL)
4171 SET_PREV_INSN (next) = insn;
4172 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
4174 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
4175 SET_PREV_INSN (sequence->insn (0)) = insn;
4179 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
4181 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (insn));
4182 SET_PREV_INSN (sequence->insn (0)) = prev;
4183 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
4187 /* Add INSN to the end of the doubly-linked list.
4188 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
4190 void
4191 add_insn (rtx_insn *insn)
4193 rtx_insn *prev = get_last_insn ();
4194 link_insn_into_chain (insn, prev, NULL);
4195 if (get_insns () == NULL)
4196 set_first_insn (insn);
4197 set_last_insn (insn);
4200 /* Add INSN into the doubly-linked list after insn AFTER. */
4202 static void
4203 add_insn_after_nobb (rtx_insn *insn, rtx_insn *after)
4205 rtx_insn *next = NEXT_INSN (after);
4207 gcc_assert (!optimize || !after->deleted ());
4209 link_insn_into_chain (insn, after, next);
4211 if (next == NULL)
4213 struct sequence_stack *seq;
4215 for (seq = get_current_sequence (); seq; seq = seq->next)
4216 if (after == seq->last)
4218 seq->last = insn;
4219 break;
4224 /* Add INSN into the doubly-linked list before insn BEFORE. */
4226 static void
4227 add_insn_before_nobb (rtx_insn *insn, rtx_insn *before)
4229 rtx_insn *prev = PREV_INSN (before);
4231 gcc_assert (!optimize || !before->deleted ());
4233 link_insn_into_chain (insn, prev, before);
4235 if (prev == NULL)
4237 struct sequence_stack *seq;
4239 for (seq = get_current_sequence (); seq; seq = seq->next)
4240 if (before == seq->first)
4242 seq->first = insn;
4243 break;
4246 gcc_assert (seq);
4250 /* Like add_insn_after_nobb, but try to set BLOCK_FOR_INSN.
4251 If BB is NULL, an attempt is made to infer the bb from AFTER.
4253 This and the next function should be the only functions called
4254 to insert an insn once delay slots have been filled since only
4255 they know how to update a SEQUENCE. */
4257 void
4258 add_insn_after (rtx_insn *insn, rtx_insn *after, basic_block bb)
4260 add_insn_after_nobb (insn, after);
4261 if (!BARRIER_P (after)
4262 && !BARRIER_P (insn)
4263 && (bb = BLOCK_FOR_INSN (after)))
4265 set_block_for_insn (insn, bb);
4266 if (INSN_P (insn))
4267 df_insn_rescan (insn);
4268 /* Should not happen as first in the BB is always
4269 either NOTE or LABEL. */
4270 if (BB_END (bb) == after
4271 /* Avoid clobbering of structure when creating new BB. */
4272 && !BARRIER_P (insn)
4273 && !NOTE_INSN_BASIC_BLOCK_P (insn))
4274 BB_END (bb) = insn;
4278 /* Like add_insn_before_nobb, but try to set BLOCK_FOR_INSN.
4279 If BB is NULL, an attempt is made to infer the bb from BEFORE.
4281 This and the previous function should be the only functions called
4282 to insert an insn once delay slots have been filled since only
4283 they know how to update a SEQUENCE. */
4285 void
4286 add_insn_before (rtx_insn *insn, rtx_insn *before, basic_block bb)
4288 add_insn_before_nobb (insn, before);
4290 if (!bb
4291 && !BARRIER_P (before)
4292 && !BARRIER_P (insn))
4293 bb = BLOCK_FOR_INSN (before);
4295 if (bb)
4297 set_block_for_insn (insn, bb);
4298 if (INSN_P (insn))
4299 df_insn_rescan (insn);
4300 /* Should not happen as first in the BB is always either NOTE or
4301 LABEL. */
4302 gcc_assert (BB_HEAD (bb) != insn
4303 /* Avoid clobbering of structure when creating new BB. */
4304 || BARRIER_P (insn)
4305 || NOTE_INSN_BASIC_BLOCK_P (insn));
4309 /* Replace INSN with a deleted instruction note. */
4311 void
4312 set_insn_deleted (rtx_insn *insn)
4314 if (INSN_P (insn))
4315 df_insn_delete (insn);
4316 PUT_CODE (insn, NOTE);
4317 NOTE_KIND (insn) = NOTE_INSN_DELETED;
4321 /* Unlink INSN from the insn chain.
4323 This function knows how to handle sequences.
4325 This function does not invalidate data flow information associated with
4326 INSN (i.e. does not call df_insn_delete). That makes this function
4327 usable only for disconnecting an insn from the chain so that it can be
4328 re-emitted elsewhere later.
4330 To later insert INSN elsewhere in the insn chain via add_insn and
4331 similar functions, PREV_INSN and NEXT_INSN must be nullified by
4332 the caller. Nullifying them here breaks many insn chain walks.
4334 To really delete an insn and related DF information, use delete_insn. */
4336 void
4337 remove_insn (rtx_insn *insn)
4339 rtx_insn *next = NEXT_INSN (insn);
4340 rtx_insn *prev = PREV_INSN (insn);
4341 basic_block bb;
4343 if (prev)
4345 SET_NEXT_INSN (prev) = next;
4346 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
4348 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
4349 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
4352 else
4354 struct sequence_stack *seq;
4356 for (seq = get_current_sequence (); seq; seq = seq->next)
4357 if (insn == seq->first)
4359 seq->first = next;
4360 break;
4363 gcc_assert (seq);
4366 if (next)
4368 SET_PREV_INSN (next) = prev;
4369 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
4371 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
4372 SET_PREV_INSN (sequence->insn (0)) = prev;
4375 else
4377 struct sequence_stack *seq;
4379 for (seq = get_current_sequence (); seq; seq = seq->next)
4380 if (insn == seq->last)
4382 seq->last = prev;
4383 break;
4386 gcc_assert (seq);
4389 /* Fix up basic block boundaries, if necessary. */
4390 if (!BARRIER_P (insn)
4391 && (bb = BLOCK_FOR_INSN (insn)))
4393 if (BB_HEAD (bb) == insn)
4395 /* Never ever delete the basic block note without deleting the whole
4396 basic block. */
4397 gcc_assert (!NOTE_P (insn));
4398 BB_HEAD (bb) = next;
4400 if (BB_END (bb) == insn)
4401 BB_END (bb) = prev;
4405 /* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
4407 void
4408 add_function_usage_to (rtx call_insn, rtx call_fusage)
4410 gcc_assert (call_insn && CALL_P (call_insn));
4412 /* Put the register usage information on the CALL. If there is already
4413 some usage information, put ours at the end. */
4414 if (CALL_INSN_FUNCTION_USAGE (call_insn))
4416 rtx link;
4418 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
4419 link = XEXP (link, 1))
4422 XEXP (link, 1) = call_fusage;
4424 else
4425 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
4428 /* Delete all insns made since FROM.
4429 FROM becomes the new last instruction. */
4431 void
4432 delete_insns_since (rtx_insn *from)
4434 if (from == 0)
4435 set_first_insn (0);
4436 else
4437 SET_NEXT_INSN (from) = 0;
4438 set_last_insn (from);
4441 /* This function is deprecated; please use sequences instead.
4443 Move a consecutive bunch of insns to a different place in the chain.
4444 The insns to be moved are those between FROM and TO.
4445 They are moved to a new position after the insn AFTER.
4446 AFTER must not be FROM or TO or any insn in between.
4448 This function does not know about SEQUENCEs and hence should not be
4449 called after delay-slot filling has been done. */
4451 void
4452 reorder_insns_nobb (rtx_insn *from, rtx_insn *to, rtx_insn *after)
4454 if (flag_checking)
4456 for (rtx_insn *x = from; x != to; x = NEXT_INSN (x))
4457 gcc_assert (after != x);
4458 gcc_assert (after != to);
4461 /* Splice this bunch out of where it is now. */
4462 if (PREV_INSN (from))
4463 SET_NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
4464 if (NEXT_INSN (to))
4465 SET_PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
4466 if (get_last_insn () == to)
4467 set_last_insn (PREV_INSN (from));
4468 if (get_insns () == from)
4469 set_first_insn (NEXT_INSN (to));
4471 /* Make the new neighbors point to it and it to them. */
4472 if (NEXT_INSN (after))
4473 SET_PREV_INSN (NEXT_INSN (after)) = to;
4475 SET_NEXT_INSN (to) = NEXT_INSN (after);
4476 SET_PREV_INSN (from) = after;
4477 SET_NEXT_INSN (after) = from;
4478 if (after == get_last_insn ())
4479 set_last_insn (to);
4482 /* Same as the function above, but take care to update BB boundaries. */
4483 void
4484 reorder_insns (rtx_insn *from, rtx_insn *to, rtx_insn *after)
4486 rtx_insn *prev = PREV_INSN (from);
4487 basic_block bb, bb2;
4489 reorder_insns_nobb (from, to, after);
4491 if (!BARRIER_P (after)
4492 && (bb = BLOCK_FOR_INSN (after)))
4494 rtx_insn *x;
4495 df_set_bb_dirty (bb);
4497 if (!BARRIER_P (from)
4498 && (bb2 = BLOCK_FOR_INSN (from)))
4500 if (BB_END (bb2) == to)
4501 BB_END (bb2) = prev;
4502 df_set_bb_dirty (bb2);
4505 if (BB_END (bb) == after)
4506 BB_END (bb) = to;
4508 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
4509 if (!BARRIER_P (x))
4510 df_insn_change_bb (x, bb);
4515 /* Emit insn(s) of given code and pattern
4516 at a specified place within the doubly-linked list.
4518 All of the emit_foo global entry points accept an object
4519 X which is either an insn list or a PATTERN of a single
4520 instruction.
4522 There are thus a few canonical ways to generate code and
4523 emit it at a specific place in the instruction stream. For
4524 example, consider the instruction named SPOT and the fact that
4525 we would like to emit some instructions before SPOT. We might
4526 do it like this:
4528 start_sequence ();
4529 ... emit the new instructions ...
4530 insns_head = get_insns ();
4531 end_sequence ();
4533 emit_insn_before (insns_head, SPOT);
4535 It used to be common to generate SEQUENCE rtl instead, but that
4536 is a relic of the past which no longer occurs. The reason is that
4537 SEQUENCE rtl results in heavily fragmented RTL memory, since the SEQUENCE
4538 generated would almost certainly die right after it was created. */
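/* A concrete sketch of the pattern above.  The names REG and VAL are
   invented for illustration; SPOT is the insn named in the comment, and
   the calls are the documented entry points:

   start_sequence ();
   emit_move_insn (reg, val);
   rtx_insn *insns_head = get_insns ();
   end_sequence ();

   emit_insn_before (insns_head, SPOT);  */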
4540 static rtx_insn *
4541 emit_pattern_before_noloc (rtx x, rtx_insn *before, rtx_insn *last,
4542 basic_block bb,
4543 rtx_insn *(*make_raw) (rtx))
4545 rtx_insn *insn;
4547 gcc_assert (before);
4549 if (x == NULL_RTX)
4550 return last;
4552 switch (GET_CODE (x))
4554 case DEBUG_INSN:
4555 case INSN:
4556 case JUMP_INSN:
4557 case CALL_INSN:
4558 case CODE_LABEL:
4559 case BARRIER:
4560 case NOTE:
4561 insn = as_a <rtx_insn *> (x);
4562 while (insn)
4564 rtx_insn *next = NEXT_INSN (insn);
4565 add_insn_before (insn, before, bb);
4566 last = insn;
4567 insn = next;
4569 break;
4571 #ifdef ENABLE_RTL_CHECKING
4572 case SEQUENCE:
4573 gcc_unreachable ();
4574 break;
4575 #endif
4577 default:
4578 last = (*make_raw) (x);
4579 add_insn_before (last, before, bb);
4580 break;
4583 return last;
4586 /* Make X be output before the instruction BEFORE. */
4588 rtx_insn *
4589 emit_insn_before_noloc (rtx x, rtx_insn *before, basic_block bb)
4591 return emit_pattern_before_noloc (x, before, before, bb, make_insn_raw);
4594 /* Make an instruction with body X and code JUMP_INSN
4595 and output it before the instruction BEFORE. */
4597 rtx_jump_insn *
4598 emit_jump_insn_before_noloc (rtx x, rtx_insn *before)
4600 return as_a <rtx_jump_insn *> (
4601 emit_pattern_before_noloc (x, before, NULL, NULL,
4602 make_jump_insn_raw));
4605 /* Make an instruction with body X and code CALL_INSN
4606 and output it before the instruction BEFORE. */
4608 rtx_insn *
4609 emit_call_insn_before_noloc (rtx x, rtx_insn *before)
4611 return emit_pattern_before_noloc (x, before, NULL, NULL,
4612 make_call_insn_raw);
4615 /* Make an instruction with body X and code DEBUG_INSN
4616 and output it before the instruction BEFORE. */
4618 rtx_insn *
4619 emit_debug_insn_before_noloc (rtx x, rtx_insn *before)
4621 return emit_pattern_before_noloc (x, before, NULL, NULL,
4622 make_debug_insn_raw);
4625 /* Make an insn of code BARRIER
4626 and output it before the insn BEFORE. */
4628 rtx_barrier *
4629 emit_barrier_before (rtx_insn *before)
4631 rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
4633 INSN_UID (insn) = cur_insn_uid++;
4635 add_insn_before (insn, before, NULL);
4636 return insn;
4639 /* Emit the label LABEL before the insn BEFORE. */
4641 rtx_code_label *
4642 emit_label_before (rtx_code_label *label, rtx_insn *before)
4644 gcc_checking_assert (INSN_UID (label) == 0);
4645 INSN_UID (label) = cur_insn_uid++;
4646 add_insn_before (label, before, NULL);
4647 return label;
4650 /* Helper for emit_insn_after, handles lists of instructions
4651 efficiently. */
4653 static rtx_insn *
4654 emit_insn_after_1 (rtx_insn *first, rtx_insn *after, basic_block bb)
4656 rtx_insn *last;
4657 rtx_insn *after_after;
4658 if (!bb && !BARRIER_P (after))
4659 bb = BLOCK_FOR_INSN (after);
4661 if (bb)
4663 df_set_bb_dirty (bb);
4664 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4665 if (!BARRIER_P (last))
4667 set_block_for_insn (last, bb);
4668 df_insn_rescan (last);
4670 if (!BARRIER_P (last))
4672 set_block_for_insn (last, bb);
4673 df_insn_rescan (last);
4675 if (BB_END (bb) == after)
4676 BB_END (bb) = last;
4678 else
4679 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4680 continue;
4682 after_after = NEXT_INSN (after);
4684 SET_NEXT_INSN (after) = first;
4685 SET_PREV_INSN (first) = after;
4686 SET_NEXT_INSN (last) = after_after;
4687 if (after_after)
4688 SET_PREV_INSN (after_after) = last;
4690 if (after == get_last_insn ())
4691 set_last_insn (last);
4693 return last;
4696 static rtx_insn *
4697 emit_pattern_after_noloc (rtx x, rtx_insn *after, basic_block bb,
4698 rtx_insn *(*make_raw)(rtx))
4700 rtx_insn *last = after;
4702 gcc_assert (after);
4704 if (x == NULL_RTX)
4705 return last;
4707 switch (GET_CODE (x))
4709 case DEBUG_INSN:
4710 case INSN:
4711 case JUMP_INSN:
4712 case CALL_INSN:
4713 case CODE_LABEL:
4714 case BARRIER:
4715 case NOTE:
4716 last = emit_insn_after_1 (as_a <rtx_insn *> (x), after, bb);
4717 break;
4719 #ifdef ENABLE_RTL_CHECKING
4720 case SEQUENCE:
4721 gcc_unreachable ();
4722 break;
4723 #endif
4725 default:
4726 last = (*make_raw) (x);
4727 add_insn_after (last, after, bb);
4728 break;
4731 return last;
4734 /* Make X be output after the insn AFTER and set the BB of insn. If
4735 BB is NULL, an attempt is made to infer the BB from AFTER. */
4737 rtx_insn *
4738 emit_insn_after_noloc (rtx x, rtx_insn *after, basic_block bb)
4740 return emit_pattern_after_noloc (x, after, bb, make_insn_raw);
4744 /* Make an insn of code JUMP_INSN with body X
4745 and output it after the insn AFTER. */
4747 rtx_jump_insn *
4748 emit_jump_insn_after_noloc (rtx x, rtx_insn *after)
4750 return as_a <rtx_jump_insn *> (
4751 emit_pattern_after_noloc (x, after, NULL, make_jump_insn_raw));
4754 /* Make an instruction with body X and code CALL_INSN
4755 and output it after the instruction AFTER. */
4757 rtx_insn *
4758 emit_call_insn_after_noloc (rtx x, rtx_insn *after)
4760 return emit_pattern_after_noloc (x, after, NULL, make_call_insn_raw);
4763 /* Make an instruction with body X and code DEBUG_INSN
4764 and output it after the instruction AFTER. */
4766 rtx_insn *
4767 emit_debug_insn_after_noloc (rtx x, rtx_insn *after)
4769 return emit_pattern_after_noloc (x, after, NULL, make_debug_insn_raw);
4772 /* Make an insn of code BARRIER
4773 and output it after the insn AFTER. */
4775 rtx_barrier *
4776 emit_barrier_after (rtx_insn *after)
4778 rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
4780 INSN_UID (insn) = cur_insn_uid++;
4782 add_insn_after (insn, after, NULL);
4783 return insn;
4786 /* Emit the label LABEL after the insn AFTER. */
4788 rtx_insn *
4789 emit_label_after (rtx_insn *label, rtx_insn *after)
4791 gcc_checking_assert (INSN_UID (label) == 0);
4792 INSN_UID (label) = cur_insn_uid++;
4793 add_insn_after (label, after, NULL);
4794 return label;
4797 /* Notes require a bit of special handling: Some notes need to have their
4798 BLOCK_FOR_INSN set, others should never have it set, and some should
4799 have it set or clear depending on the context. */
4801 /* Return true iff a note of kind SUBTYPE should be emitted with routines
4802 that never set BLOCK_FOR_INSN on NOTE. ON_BB_BOUNDARY_P is true if the
4803 caller is asked to emit a note before BB_HEAD, or after BB_END. */
4805 static bool
4806 note_outside_basic_block_p (enum insn_note subtype, bool on_bb_boundary_p)
4808 switch (subtype)
4810 /* NOTE_INSN_SWITCH_TEXT_SECTIONS only appears between basic blocks. */
4811 case NOTE_INSN_SWITCH_TEXT_SECTIONS:
4812 return true;
4814 /* Notes for var tracking and EH region markers can appear between or
4815 inside basic blocks. If the caller is emitting on the basic block
4816 boundary, do not set BLOCK_FOR_INSN on the new note. */
4817 case NOTE_INSN_VAR_LOCATION:
4818 case NOTE_INSN_EH_REGION_BEG:
4819 case NOTE_INSN_EH_REGION_END:
4820 return on_bb_boundary_p;
4822 /* Otherwise, BLOCK_FOR_INSN must be set. */
4823 default:
4824 return false;
4828 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
4830 rtx_note *
4831 emit_note_after (enum insn_note subtype, rtx_insn *after)
4833 rtx_note *note = make_note_raw (subtype);
4834 basic_block bb = BARRIER_P (after) ? NULL : BLOCK_FOR_INSN (after);
4835 bool on_bb_boundary_p = (bb != NULL && BB_END (bb) == after);
4837 if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4838 add_insn_after_nobb (note, after);
4839 else
4840 add_insn_after (note, after, bb);
4841 return note;
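/* A minimal sketch, assuming INSN is the last insn of its basic block
   (the name is invented for illustration):

   rtx_note *note = emit_note_after (NOTE_INSN_EH_REGION_END, insn);

   NOTE_INSN_EH_REGION_END is one of the boundary-tolerant kinds above
   and INSN is BB_END, so the note is linked with add_insn_after_nobb
   and its BLOCK_FOR_INSN stays NULL.  */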
4844 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4846 rtx_note *
4847 emit_note_before (enum insn_note subtype, rtx_insn *before)
4849 rtx_note *note = make_note_raw (subtype);
4850 basic_block bb = BARRIER_P (before) ? NULL : BLOCK_FOR_INSN (before);
4851 bool on_bb_boundary_p = (bb != NULL && BB_HEAD (bb) == before);
4853 if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4854 add_insn_before_nobb (note, before);
4855 else
4856 add_insn_before (note, before, bb);
4857 return note;
4860 /* Insert PATTERN after AFTER, setting its INSN_LOCATION to LOC.
4861 MAKE_RAW indicates how to turn PATTERN into a real insn. */
4863 static rtx_insn *
4864 emit_pattern_after_setloc (rtx pattern, rtx_insn *after, location_t loc,
4865 rtx_insn *(*make_raw) (rtx))
4867 rtx_insn *last = emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4869 if (pattern == NULL_RTX || !loc)
4870 return last;
4872 after = NEXT_INSN (after);
4873 while (1)
4875 if (active_insn_p (after)
4876 && !JUMP_TABLE_DATA_P (after) /* FIXME */
4877 && !INSN_LOCATION (after))
4878 INSN_LOCATION (after) = loc;
4879 if (after == last)
4880 break;
4881 after = NEXT_INSN (after);
4883 return last;
4886 /* Insert PATTERN after AFTER. MAKE_RAW indicates how to turn PATTERN
4887 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert after
4888 any DEBUG_INSNs. */
4890 static rtx_insn *
4891 emit_pattern_after (rtx pattern, rtx_insn *after, bool skip_debug_insns,
4892 rtx_insn *(*make_raw) (rtx))
4894 rtx_insn *prev = after;
4896 if (skip_debug_insns)
4897 while (DEBUG_INSN_P (prev))
4898 prev = PREV_INSN (prev);
4900 if (INSN_P (prev))
4901 return emit_pattern_after_setloc (pattern, after, INSN_LOCATION (prev),
4902 make_raw);
4903 else
4904 return emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4907 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4908 rtx_insn *
4909 emit_insn_after_setloc (rtx pattern, rtx_insn *after, location_t loc)
4911 return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw);
4914 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4915 rtx_insn *
4916 emit_insn_after (rtx pattern, rtx_insn *after)
4918 return emit_pattern_after (pattern, after, true, make_insn_raw);
4921 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4922 rtx_jump_insn *
4923 emit_jump_insn_after_setloc (rtx pattern, rtx_insn *after, location_t loc)
4925 return as_a <rtx_jump_insn *> (
4926 emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw));
4929 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4930 rtx_jump_insn *
4931 emit_jump_insn_after (rtx pattern, rtx_insn *after)
4933 return as_a <rtx_jump_insn *> (
4934 emit_pattern_after (pattern, after, true, make_jump_insn_raw));
4937 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4938 rtx_insn *
4939 emit_call_insn_after_setloc (rtx pattern, rtx_insn *after, location_t loc)
4941 return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw);
4944 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4945 rtx_insn *
4946 emit_call_insn_after (rtx pattern, rtx_insn *after)
4948 return emit_pattern_after (pattern, after, true, make_call_insn_raw);
4951 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4952 rtx_insn *
4953 emit_debug_insn_after_setloc (rtx pattern, rtx_insn *after, location_t loc)
4955 return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw);
4958 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4959 rtx_insn *
4960 emit_debug_insn_after (rtx pattern, rtx_insn *after)
4962 return emit_pattern_after (pattern, after, false, make_debug_insn_raw);
4965 /* Insert PATTERN before BEFORE, setting its INSN_LOCATION to LOC.
4966 MAKE_RAW indicates how to turn PATTERN into a real insn. INSNP
4967 indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN,
4968 CALL_INSN, etc. */
4970 static rtx_insn *
4971 emit_pattern_before_setloc (rtx pattern, rtx_insn *before, location_t loc,
4972 bool insnp, rtx_insn *(*make_raw) (rtx))
4974 rtx_insn *first = PREV_INSN (before);
4975 rtx_insn *last = emit_pattern_before_noloc (pattern, before,
4976 insnp ? before : NULL,
4977 NULL, make_raw);
4979 if (pattern == NULL_RTX || !loc)
4980 return last;
4982 if (!first)
4983 first = get_insns ();
4984 else
4985 first = NEXT_INSN (first);
4986 while (1)
4988 if (active_insn_p (first)
4989 && !JUMP_TABLE_DATA_P (first) /* FIXME */
4990 && !INSN_LOCATION (first))
4991 INSN_LOCATION (first) = loc;
4992 if (first == last)
4993 break;
4994 first = NEXT_INSN (first);
4996 return last;
4999 /* Insert PATTERN before BEFORE. MAKE_RAW indicates how to turn PATTERN
5000 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert
5001 before any DEBUG_INSNs. INSNP indicates if PATTERN is meant for an
5002 INSN as opposed to a JUMP_INSN, CALL_INSN, etc. */
5004 static rtx_insn *
5005 emit_pattern_before (rtx pattern, rtx_insn *before, bool skip_debug_insns,
5006 bool insnp, rtx_insn *(*make_raw) (rtx))
5008 rtx_insn *next = before;
5010 if (skip_debug_insns)
5011 while (DEBUG_INSN_P (next))
5012 next = PREV_INSN (next);
5014 if (INSN_P (next))
5015 return emit_pattern_before_setloc (pattern, before, INSN_LOCATION (next),
5016 insnp, make_raw);
5017 else
5018 return emit_pattern_before_noloc (pattern, before,
5019 insnp ? before : NULL,
5020 NULL, make_raw);
5023 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC. */
5024 rtx_insn *
5025 emit_insn_before_setloc (rtx pattern, rtx_insn *before, location_t loc)
5027 return emit_pattern_before_setloc (pattern, before, loc, true,
5028 make_insn_raw);
5031 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
5032 rtx_insn *
5033 emit_insn_before (rtx pattern, rtx_insn *before)
5035 return emit_pattern_before (pattern, before, true, true, make_insn_raw);
5038 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to LOC. */
5039 rtx_jump_insn *
5040 emit_jump_insn_before_setloc (rtx pattern, rtx_insn *before, location_t loc)
5042 return as_a <rtx_jump_insn *> (
5043 emit_pattern_before_setloc (pattern, before, loc, false,
5044 make_jump_insn_raw));
5047 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
5048 rtx_jump_insn *
5049 emit_jump_insn_before (rtx pattern, rtx_insn *before)
5051 return as_a <rtx_jump_insn *> (
5052 emit_pattern_before (pattern, before, true, false,
5053 make_jump_insn_raw));
5056 /* Like emit_call_insn_before_noloc, but set INSN_LOCATION according to LOC. */
5057 rtx_insn *
5058 emit_call_insn_before_setloc (rtx pattern, rtx_insn *before, location_t loc)
5060 return emit_pattern_before_setloc (pattern, before, loc, false,
5061 make_call_insn_raw);
5064 /* Like emit_call_insn_before_noloc,
5065 but set insn_location according to BEFORE. */
5066 rtx_insn *
5067 emit_call_insn_before (rtx pattern, rtx_insn *before)
5069 return emit_pattern_before (pattern, before, true, false,
5070 make_call_insn_raw);
5073 /* Like emit_debug_insn_before_noloc, but set INSN_LOCATION according to LOC. */
5074 rtx_insn *
5075 emit_debug_insn_before_setloc (rtx pattern, rtx_insn *before, location_t loc)
5077 return emit_pattern_before_setloc (pattern, before, loc, false,
5078 make_debug_insn_raw);
5081 /* Like emit_debug_insn_before_noloc,
5082 but set insn_location according to BEFORE. */
5083 rtx_insn *
5084 emit_debug_insn_before (rtx pattern, rtx_insn *before)
5086 return emit_pattern_before (pattern, before, false, false,
5087 make_debug_insn_raw);
5090 /* Take X and emit it at the end of the doubly-linked
5091 INSN list.
5093 Returns the last insn emitted. */
5095 rtx_insn *
5096 emit_insn (rtx x)
5098 rtx_insn *last = get_last_insn ();
5099 rtx_insn *insn;
5101 if (x == NULL_RTX)
5102 return last;
5104 switch (GET_CODE (x))
5106 case DEBUG_INSN:
5107 case INSN:
5108 case JUMP_INSN:
5109 case CALL_INSN:
5110 case CODE_LABEL:
5111 case BARRIER:
5112 case NOTE:
5113 insn = as_a <rtx_insn *> (x);
5114 while (insn)
5116 rtx_insn *next = NEXT_INSN (insn);
5117 add_insn (insn);
5118 last = insn;
5119 insn = next;
5121 break;
5123 #ifdef ENABLE_RTL_CHECKING
5124 case JUMP_TABLE_DATA:
5125 case SEQUENCE:
5126 gcc_unreachable ();
5127 break;
5128 #endif
5130 default:
5131 last = make_insn_raw (x);
5132 add_insn (last);
5133 break;
5136 return last;
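/* The usual entry point is to hand emit_insn a bare pattern rather than
   an insn list; a minimal sketch, assuming DEST and SRC are rtx operands
   of the same mode (names invented for illustration):

   rtx_insn *set_insn = emit_insn (gen_rtx_SET (dest, src));

   The pattern falls through to the default case above, is wrapped by
   make_insn_raw and appended to the chain.  */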
5139 /* Make an insn of code DEBUG_INSN with pattern X
5140 and add it to the end of the doubly-linked list. */
5142 rtx_insn *
5143 emit_debug_insn (rtx x)
5145 rtx_insn *last = get_last_insn ();
5146 rtx_insn *insn;
5148 if (x == NULL_RTX)
5149 return last;
5151 switch (GET_CODE (x))
5153 case DEBUG_INSN:
5154 case INSN:
5155 case JUMP_INSN:
5156 case CALL_INSN:
5157 case CODE_LABEL:
5158 case BARRIER:
5159 case NOTE:
5160 insn = as_a <rtx_insn *> (x);
5161 while (insn)
5163 rtx_insn *next = NEXT_INSN (insn);
5164 add_insn (insn);
5165 last = insn;
5166 insn = next;
5168 break;
5170 #ifdef ENABLE_RTL_CHECKING
5171 case JUMP_TABLE_DATA:
5172 case SEQUENCE:
5173 gcc_unreachable ();
5174 break;
5175 #endif
5177 default:
5178 last = make_debug_insn_raw (x);
5179 add_insn (last);
5180 break;
5183 return last;
5186 /* Make an insn of code JUMP_INSN with pattern X
5187 and add it to the end of the doubly-linked list. */
5189 rtx_insn *
5190 emit_jump_insn (rtx x)
5192 rtx_insn *last = NULL;
5193 rtx_insn *insn;
5195 switch (GET_CODE (x))
5197 case DEBUG_INSN:
5198 case INSN:
5199 case JUMP_INSN:
5200 case CALL_INSN:
5201 case CODE_LABEL:
5202 case BARRIER:
5203 case NOTE:
5204 insn = as_a <rtx_insn *> (x);
5205 while (insn)
5207 rtx_insn *next = NEXT_INSN (insn);
5208 add_insn (insn);
5209 last = insn;
5210 insn = next;
5212 break;
5214 #ifdef ENABLE_RTL_CHECKING
5215 case JUMP_TABLE_DATA:
5216 case SEQUENCE:
5217 gcc_unreachable ();
5218 break;
5219 #endif
5221 default:
5222 last = make_jump_insn_raw (x);
5223 add_insn (last);
5224 break;
5227 return last;
5230 /* Make an insn of code CALL_INSN with pattern X
5231 and add it to the end of the doubly-linked list. */
5233 rtx_insn *
5234 emit_call_insn (rtx x)
5236 rtx_insn *insn;
5238 switch (GET_CODE (x))
5240 case DEBUG_INSN:
5241 case INSN:
5242 case JUMP_INSN:
5243 case CALL_INSN:
5244 case CODE_LABEL:
5245 case BARRIER:
5246 case NOTE:
5247 insn = emit_insn (x);
5248 break;
5250 #ifdef ENABLE_RTL_CHECKING
5251 case SEQUENCE:
5252 case JUMP_TABLE_DATA:
5253 gcc_unreachable ();
5254 break;
5255 #endif
5257 default:
5258 insn = make_call_insn_raw (x);
5259 add_insn (insn);
5260 break;
5263 return insn;
5266 /* Add the label LABEL to the end of the doubly-linked list. */
5268 rtx_code_label *
5269 emit_label (rtx uncast_label)
5271 rtx_code_label *label = as_a <rtx_code_label *> (uncast_label);
5273 gcc_checking_assert (INSN_UID (label) == 0);
5274 INSN_UID (label) = cur_insn_uid++;
5275 add_insn (label);
5276 return label;
5279 /* Make an insn of code JUMP_TABLE_DATA
5280 and add it to the end of the doubly-linked list. */
5282 rtx_jump_table_data *
5283 emit_jump_table_data (rtx table)
5285 rtx_jump_table_data *jump_table_data =
5286 as_a <rtx_jump_table_data *> (rtx_alloc (JUMP_TABLE_DATA));
5287 INSN_UID (jump_table_data) = cur_insn_uid++;
5288 PATTERN (jump_table_data) = table;
5289 BLOCK_FOR_INSN (jump_table_data) = NULL;
5290 add_insn (jump_table_data);
5291 return jump_table_data;
5294 /* Make an insn of code BARRIER
5295 and add it to the end of the doubly-linked list. */
5297 rtx_barrier *
5298 emit_barrier (void)
5300 rtx_barrier *barrier = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
5301 INSN_UID (barrier) = cur_insn_uid++;
5302 add_insn (barrier);
5303 return barrier;
5306 /* Emit a copy of note ORIG. */
5308 rtx_note *
5309 emit_note_copy (rtx_note *orig)
5311 enum insn_note kind = (enum insn_note) NOTE_KIND (orig);
5312 rtx_note *note = make_note_raw (kind);
5313 NOTE_DATA (note) = NOTE_DATA (orig);
5314 add_insn (note);
5315 return note;
5318 /* Make an insn of code NOTE with kind KIND
5319 and add it to the end of the doubly-linked list. */
5321 rtx_note *
5322 emit_note (enum insn_note kind)
5324 rtx_note *note = make_note_raw (kind);
5325 add_insn (note);
5326 return note;
5329 /* Emit a clobber of lvalue X. */
5331 rtx_insn *
5332 emit_clobber (rtx x)
5334 /* CONCATs should not appear in the insn stream. */
5335 if (GET_CODE (x) == CONCAT)
5337 emit_clobber (XEXP (x, 0));
5338 return emit_clobber (XEXP (x, 1));
5340 return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
5343 /* Return a sequence of insns to clobber lvalue X. */
5345 rtx_insn *
5346 gen_clobber (rtx x)
5348 rtx_insn *seq;
5350 start_sequence ();
5351 emit_clobber (x);
5352 seq = get_insns ();
5353 end_sequence ();
5354 return seq;
5357 /* Emit a use of rvalue X. */
5359 rtx_insn *
5360 emit_use (rtx x)
5362 /* CONCATs should not appear in the insn stream. */
5363 if (GET_CODE (x) == CONCAT)
5365 emit_use (XEXP (x, 0));
5366 return emit_use (XEXP (x, 1));
5368 return emit_insn (gen_rtx_USE (VOIDmode, x));
5371 /* Return a sequence of insns to use rvalue X. */
5373 rtx_insn *
5374 gen_use (rtx x)
5376 rtx_insn *seq;
5378 start_sequence ();
5379 emit_use (x);
5380 seq = get_insns ();
5381 end_sequence ();
5382 return seq;
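/* Both gen_clobber and gen_use hand back a detached insn list that the
   caller can place explicitly; a minimal sketch, assuming HARD_REG is a
   hard-register rtx and INSN an existing insn (names invented for
   illustration):

   rtx_insn *uses = gen_use (hard_reg);
   emit_insn_after (uses, insn);  */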
5385 /* Notes like REG_EQUAL and REG_EQUIV refer to a set in an instruction.
5386 Return the set in INSN that such notes describe, or NULL if the notes
5387 have no meaning for INSN. */
5390 set_for_reg_notes (rtx insn)
5392 rtx pat, reg;
5394 if (!INSN_P (insn))
5395 return NULL_RTX;
5397 pat = PATTERN (insn);
5398 if (GET_CODE (pat) == PARALLEL)
5400 /* We do not use single_set because that ignores SETs of unused
5401 registers. REG_EQUAL and REG_EQUIV notes really do require the
5402 PARALLEL to have a single SET. */
5403 if (multiple_sets (insn))
5404 return NULL_RTX;
5405 pat = XVECEXP (pat, 0, 0);
5408 if (GET_CODE (pat) != SET)
5409 return NULL_RTX;
5411 reg = SET_DEST (pat);
5413 /* Notes apply to the contents of a STRICT_LOW_PART or ZERO_EXTRACT. */
5414 if (GET_CODE (reg) == STRICT_LOW_PART
5415 || GET_CODE (reg) == ZERO_EXTRACT)
5416 reg = XEXP (reg, 0);
5418 /* Check that we have a register. */
5419 if (!(REG_P (reg) || GET_CODE (reg) == SUBREG))
5420 return NULL_RTX;
5422 return pat;
5425 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
5426 note of this type already exists, remove it first. */
5429 set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
5431 rtx note = find_reg_note (insn, kind, NULL_RTX);
5433 switch (kind)
5435 case REG_EQUAL:
5436 case REG_EQUIV:
5437 /* We need to support the REG_EQUAL on USE trick of find_reloads. */
5438 if (!set_for_reg_notes (insn) && GET_CODE (PATTERN (insn)) != USE)
5439 return NULL_RTX;
5441 /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
5442 It serves no useful purpose and breaks eliminate_regs. */
5443 if (GET_CODE (datum) == ASM_OPERANDS)
5444 return NULL_RTX;
5446 /* Notes with side effects are dangerous. Even if the side-effect
5447 initially mirrors one in PATTERN (INSN), later optimizations
5448 might alter the way that the final register value is calculated
5449 and so move or alter the side-effect in some way. The note would
5450 then no longer be a valid substitution for SET_SRC. */
5451 if (side_effects_p (datum))
5452 return NULL_RTX;
5453 break;
5455 default:
5456 break;
5459 if (note)
5460 XEXP (note, 0) = datum;
5461 else
5463 add_reg_note (insn, kind, datum);
5464 note = REG_NOTES (insn);
5467 switch (kind)
5469 case REG_EQUAL:
5470 case REG_EQUIV:
5471 df_notes_rescan (as_a <rtx_insn *> (insn));
5472 break;
5473 default:
5474 break;
5477 return note;
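/* A typical caller pattern, assuming INSN is a single-set insn whose
   destination is known to end up holding the constant 42 (the constant
   is invented for illustration):

   set_unique_reg_note (insn, REG_EQUAL, GEN_INT (42));

   Any existing REG_EQUAL note is reused and only its datum is replaced,
   so callers need not remove the old note first.  */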
5480 /* Like set_unique_reg_note, but don't do anything unless INSN sets DST. */
5482 set_dst_reg_note (rtx insn, enum reg_note kind, rtx datum, rtx dst)
5484 rtx set = set_for_reg_notes (insn);
5486 if (set && SET_DEST (set) == dst)
5487 return set_unique_reg_note (insn, kind, datum);
5488 return NULL_RTX;
5491 /* Emit the rtl pattern X as an appropriate kind of insn. Also emit a
5492 following barrier if the instruction needs one and if ALLOW_BARRIER_P
5493 is true.
5495 If X is a label, it is simply added into the insn chain. */
5497 rtx_insn *
5498 emit (rtx x, bool allow_barrier_p)
5500 enum rtx_code code = classify_insn (x);
5502 switch (code)
5504 case CODE_LABEL:
5505 return emit_label (x);
5506 case INSN:
5507 return emit_insn (x);
5508 case JUMP_INSN:
5510 rtx_insn *insn = emit_jump_insn (x);
5511 if (allow_barrier_p
5512 && (any_uncondjump_p (insn) || GET_CODE (x) == RETURN))
5513 return emit_barrier ();
5514 return insn;
5516 case CALL_INSN:
5517 return emit_call_insn (x);
5518 case DEBUG_INSN:
5519 return emit_debug_insn (x);
5520 default:
5521 gcc_unreachable ();
5525 /* Space for free sequence stack entries. */
5526 static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
5528 /* Begin emitting insns to a sequence. If this sequence will contain
5529 something that might cause the compiler to pop arguments to function
5530 calls (because those pops have previously been deferred; see
5531 INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
5532 before calling this function. That will ensure that the deferred
5533 pops are not accidentally emitted in the middle of this sequence. */
5535 void
5536 start_sequence (void)
5538 struct sequence_stack *tem;
5540 if (free_sequence_stack != NULL)
5542 tem = free_sequence_stack;
5543 free_sequence_stack = tem->next;
5545 else
5546 tem = ggc_alloc<sequence_stack> ();
5548 tem->next = get_current_sequence ()->next;
5549 tem->first = get_insns ();
5550 tem->last = get_last_insn ();
5551 get_current_sequence ()->next = tem;
5553 set_first_insn (0);
5554 set_last_insn (0);
5557 /* Set up the insn chain starting with FIRST as the current sequence,
5558 saving the previously current one. See the documentation for
5559 start_sequence for more information about how to use this function. */
5561 void
5562 push_to_sequence (rtx_insn *first)
5564 rtx_insn *last;
5566 start_sequence ();
5568 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last))
5571 set_first_insn (first);
5572 set_last_insn (last);
5575 /* Like push_to_sequence, but take the last insn as an argument to avoid
5576 looping through the list. */
5578 void
5579 push_to_sequence2 (rtx_insn *first, rtx_insn *last)
5581 start_sequence ();
5583 set_first_insn (first);
5584 set_last_insn (last);
5587 /* Set up the outer-level insn chain
5588 as the current sequence, saving the previously current one. */
5590 void
5591 push_topmost_sequence (void)
5593 struct sequence_stack *top;
5595 start_sequence ();
5597 top = get_topmost_sequence ();
5598 set_first_insn (top->first);
5599 set_last_insn (top->last);
5602 /* After emitting to the outer-level insn chain, update the outer-level
5603 insn chain, and restore the previous saved state. */
5605 void
5606 pop_topmost_sequence (void)
5608 struct sequence_stack *top;
5610 top = get_topmost_sequence ();
5611 top->first = get_insns ();
5612 top->last = get_last_insn ();
5614 end_sequence ();
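/* push_topmost_sequence and pop_topmost_sequence always come in pairs;
   a minimal sketch of emitting into the function's outer-level chain
   while a nested sequence is active (INSN invented for illustration):

   push_topmost_sequence ();
   emit_insn (insn);
   pop_topmost_sequence ();  */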
5617 /* After emitting to a sequence, restore previous saved state.
5619 To get the contents of the sequence just made, you must call
5620 `get_insns' *before* calling here.
5622 If the compiler might have deferred popping arguments while
5623 generating this sequence, and this sequence will not be immediately
5624 inserted into the instruction stream, use do_pending_stack_adjust
5625 before calling get_insns. That will ensure that the deferred
5626 pops are inserted into this sequence, and not into some random
5627 location in the instruction stream. See INHIBIT_DEFER_POP for more
5628 information about deferred popping of arguments. */
5630 void
5631 end_sequence (void)
5633 struct sequence_stack *tem = get_current_sequence ()->next;
5635 set_first_insn (tem->first);
5636 set_last_insn (tem->last);
5637 get_current_sequence ()->next = tem->next;
5639 memset (tem, 0, sizeof (*tem));
5640 tem->next = free_sequence_stack;
5641 free_sequence_stack = tem;
5644 /* Return 1 if currently emitting into a sequence. */
5647 in_sequence_p (void)
5649 return get_current_sequence ()->next != 0;
5652 /* Put the various virtual registers into REGNO_REG_RTX. */
5654 static void
5655 init_virtual_regs (void)
5657 regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
5658 regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
5659 regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
5660 regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
5661 regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
5662 regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
5663 = virtual_preferred_stack_boundary_rtx;
5667 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
5668 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
5669 static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
5670 static int copy_insn_n_scratches;
5672 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5673 copied an ASM_OPERANDS.
5674 In that case, it is the original input-operand vector. */
5675 static rtvec orig_asm_operands_vector;
5677 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5678 copied an ASM_OPERANDS.
5679 In that case, it is the copied input-operand vector. */
5680 static rtvec copy_asm_operands_vector;
5682 /* Likewise for the constraints vector. */
5683 static rtvec orig_asm_constraints_vector;
5684 static rtvec copy_asm_constraints_vector;
5686 /* Recursively create a new copy of an rtx for copy_insn.
5687 This function differs from copy_rtx in that it handles SCRATCHes and
5688 ASM_OPERANDs properly.
5689 Normally, this function is not used directly; use copy_insn as front end.
5690 However, you could first copy an insn pattern with copy_insn and then use
5691 this function afterwards to properly copy any REG_NOTEs containing
5692 SCRATCHes. */
5695 copy_insn_1 (rtx orig)
5697 rtx copy;
5698 int i, j;
5699 RTX_CODE code;
5700 const char *format_ptr;
5702 if (orig == NULL)
5703 return NULL;
5705 code = GET_CODE (orig);
5707 switch (code)
5709 case REG:
5710 case DEBUG_EXPR:
5711 CASE_CONST_ANY:
5712 case SYMBOL_REF:
5713 case CODE_LABEL:
5714 case PC:
5715 case CC0:
5716 case RETURN:
5717 case SIMPLE_RETURN:
5718 return orig;
5719 case CLOBBER:
5720 case CLOBBER_HIGH:
5721 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
5722 clobbers or clobbers of hard registers that originated as pseudos.
5723 This is needed to allow safe register renaming. */
5724 if (REG_P (XEXP (orig, 0))
5725 && HARD_REGISTER_NUM_P (REGNO (XEXP (orig, 0)))
5726 && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (orig, 0))))
5727 return orig;
5728 break;
5730 case SCRATCH:
5731 for (i = 0; i < copy_insn_n_scratches; i++)
5732 if (copy_insn_scratch_in[i] == orig)
5733 return copy_insn_scratch_out[i];
5734 break;
5736 case CONST:
5737 if (shared_const_p (orig))
5738 return orig;
5739 break;
5741 /* A MEM with a constant address is not sharable. The problem is that
5742 the constant address may need to be reloaded. If the mem is shared,
5743 then reloading one copy of this mem will cause all copies to appear
5744 to have been reloaded. */
5746 default:
5747 break;
5750 /* Copy the various flags, fields, and other information. We assume
5751 that all fields need copying, and then clear the fields that should
5752 not be copied. That is the sensible default behavior, and forces
5753 us to explicitly document why we are *not* copying a flag. */
5754 copy = shallow_copy_rtx (orig);
5756 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
5757 if (INSN_P (orig))
5759 RTX_FLAG (copy, jump) = 0;
5760 RTX_FLAG (copy, call) = 0;
5761 RTX_FLAG (copy, frame_related) = 0;
5764 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
5766 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
5767 switch (*format_ptr++)
5769 case 'e':
5770 if (XEXP (orig, i) != NULL)
5771 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
5772 break;
5774 case 'E':
5775 case 'V':
5776 if (XVEC (orig, i) == orig_asm_constraints_vector)
5777 XVEC (copy, i) = copy_asm_constraints_vector;
5778 else if (XVEC (orig, i) == orig_asm_operands_vector)
5779 XVEC (copy, i) = copy_asm_operands_vector;
5780 else if (XVEC (orig, i) != NULL)
5782 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
5783 for (j = 0; j < XVECLEN (copy, i); j++)
5784 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
5786 break;
5788 case 't':
5789 case 'w':
5790 case 'i':
5791 case 'p':
5792 case 's':
5793 case 'S':
5794 case 'u':
5795 case '0':
5796 /* These are left unchanged. */
5797 break;
5799 default:
5800 gcc_unreachable ();
5803 if (code == SCRATCH)
5805 i = copy_insn_n_scratches++;
5806 gcc_assert (i < MAX_RECOG_OPERANDS);
5807 copy_insn_scratch_in[i] = orig;
5808 copy_insn_scratch_out[i] = copy;
5810 else if (code == ASM_OPERANDS)
5812 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
5813 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
5814 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
5815 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
5818 return copy;
5821 /* Create a new copy of an rtx.
5822 This function differs from copy_rtx in that it handles SCRATCHes and
5823 ASM_OPERANDs properly.
5824 INSN doesn't really have to be a full INSN; it could be just the
5825 pattern. */
5827 copy_insn (rtx insn)
5829 copy_insn_n_scratches = 0;
5830 orig_asm_operands_vector = 0;
5831 orig_asm_constraints_vector = 0;
5832 copy_asm_operands_vector = 0;
5833 copy_asm_constraints_vector = 0;
5834 return copy_insn_1 (insn);
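/* A typical use is duplicating the body of an existing insn so that the
   copy can be re-emitted without sharing SCRATCHes or ASM_OPERANDS; a
   minimal sketch, assuming INSN is an existing rtx_insn * (the name is
   invented for illustration):

   rtx pat = copy_insn (PATTERN (insn));
   emit_insn (pat);  */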
5837 /* Return a copy of INSN that can be used in a SEQUENCE delay slot,
5838 on the assumption that INSN itself remains in its original place. */
5840 rtx_insn *
5841 copy_delay_slot_insn (rtx_insn *insn)
5843 /* Copy INSN with its rtx_code, all its notes, location etc. */
5844 insn = as_a <rtx_insn *> (copy_rtx (insn));
5845 INSN_UID (insn) = cur_insn_uid++;
5846 return insn;
5849 /* Initialize data structures and variables in this file
5850 before generating rtl for each function. */
5852 void
5853 init_emit (void)
5855 set_first_insn (NULL);
5856 set_last_insn (NULL);
5857 if (MIN_NONDEBUG_INSN_UID)
5858 cur_insn_uid = MIN_NONDEBUG_INSN_UID;
5859 else
5860 cur_insn_uid = 1;
5861 cur_debug_insn_uid = 1;
5862 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
5863 first_label_num = label_num;
5864 get_current_sequence ()->next = NULL;
5866 /* Init the tables that describe all the pseudo regs. */
5868 crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
5870 crtl->emit.regno_pointer_align
5871 = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);
5873 regno_reg_rtx
5874 = ggc_cleared_vec_alloc<rtx> (crtl->emit.regno_pointer_align_length);
5876 /* Put copies of all the hard registers into regno_reg_rtx. */
5877 memcpy (regno_reg_rtx,
5878 initial_regno_reg_rtx,
5879 FIRST_PSEUDO_REGISTER * sizeof (rtx));
5881 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
5882 init_virtual_regs ();
5884 /* Indicate that the virtual registers and stack locations are
5885 all pointers. */
5886 REG_POINTER (stack_pointer_rtx) = 1;
5887 REG_POINTER (frame_pointer_rtx) = 1;
5888 REG_POINTER (hard_frame_pointer_rtx) = 1;
5889 REG_POINTER (arg_pointer_rtx) = 1;
5891 REG_POINTER (virtual_incoming_args_rtx) = 1;
5892 REG_POINTER (virtual_stack_vars_rtx) = 1;
5893 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5894 REG_POINTER (virtual_outgoing_args_rtx) = 1;
5895 REG_POINTER (virtual_cfa_rtx) = 1;
5897 #ifdef STACK_BOUNDARY
5898 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5899 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5900 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5901 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5903 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5904 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5905 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5906 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5908 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
5909 #endif
5911 #ifdef INIT_EXPANDERS
5912 INIT_EXPANDERS;
5913 #endif
5916 /* Return the value of element I of CONST_VECTOR X as a wide_int. */
5918 wide_int
5919 const_vector_int_elt (const_rtx x, unsigned int i)
5921 /* First handle elements that are directly encoded. */
5922 machine_mode elt_mode = GET_MODE_INNER (GET_MODE (x));
5923 if (i < (unsigned int) XVECLEN (x, 0))
5924 return rtx_mode_t (CONST_VECTOR_ENCODED_ELT (x, i), elt_mode);
5926 /* Identify the pattern that contains element I and work out the index of
5927 the last encoded element for that pattern. */
5928 unsigned int encoded_nelts = const_vector_encoded_nelts (x);
5929 unsigned int npatterns = CONST_VECTOR_NPATTERNS (x);
5930 unsigned int count = i / npatterns;
5931 unsigned int pattern = i % npatterns;
5932 unsigned int final_i = encoded_nelts - npatterns + pattern;
5934 /* If there are no steps, the final encoded value is the right one. */
5935 if (!CONST_VECTOR_STEPPED_P (x))
5936 return rtx_mode_t (CONST_VECTOR_ENCODED_ELT (x, final_i), elt_mode);
5938 /* Otherwise work out the value from the last two encoded elements. */
5939 rtx v1 = CONST_VECTOR_ENCODED_ELT (x, final_i - npatterns);
5940 rtx v2 = CONST_VECTOR_ENCODED_ELT (x, final_i);
5941 wide_int diff = wi::sub (rtx_mode_t (v2, elt_mode),
5942 rtx_mode_t (v1, elt_mode));
5943 return wi::add (rtx_mode_t (v2, elt_mode), (count - 2) * diff);
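/* Worked example of the stepped case, assuming a V8SImode constant
   encoded with one pattern of three elements {0, 1, 2}, i.e. the full
   vector {0, 1, 2, 3, 4, 5, 6, 7}: for I = 5 we get COUNT = 5,
   PATTERN = 0, FINAL_I = 2, V1 = 1, V2 = 2, DIFF = 1, and the result
   is 2 + (5 - 2) * 1 = 5, as expected.  */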
5946 /* Return the value of element I of CONST_VECTOR X. */
5949 const_vector_elt (const_rtx x, unsigned int i)
5951 /* First handle elements that are directly encoded. */
5952 if (i < (unsigned int) XVECLEN (x, 0))
5953 return CONST_VECTOR_ENCODED_ELT (x, i);
5955 /* If there are no steps, the final encoded value is the right one. */
5956 if (!CONST_VECTOR_STEPPED_P (x))
5958 /* Identify the pattern that contains element I and work out the index of
5959 the last encoded element for that pattern. */
5960 unsigned int encoded_nelts = const_vector_encoded_nelts (x);
5961 unsigned int npatterns = CONST_VECTOR_NPATTERNS (x);
5962 unsigned int pattern = i % npatterns;
5963 unsigned int final_i = encoded_nelts - npatterns + pattern;
5964 return CONST_VECTOR_ENCODED_ELT (x, final_i);
5967 /* Otherwise work out the value from the last two encoded elements. */
5968 return immed_wide_int_const (const_vector_int_elt (x, i),
5969 GET_MODE_INNER (GET_MODE (x)));
5972 /* Return true if X is a valid element for a CONST_VECTOR of the given
5973 mode. */
5975 bool
5976 valid_for_const_vector_p (machine_mode, rtx x)
5978 return (CONST_SCALAR_INT_P (x)
5979 || CONST_DOUBLE_AS_FLOAT_P (x)
5980 || CONST_FIXED_P (x));
5983 /* Generate a vector constant of mode MODE in which every element has
5984 value ELT. */
5987 gen_const_vec_duplicate (machine_mode mode, rtx elt)
5989 rtx_vector_builder builder (mode, 1, 1);
5990 builder.quick_push (elt);
5991 return builder.build ();
5994 /* Return a vector rtx of mode MODE in which every element has value X.
5995 The result will be a constant if X is constant. */
5998 gen_vec_duplicate (machine_mode mode, rtx x)
6000 if (valid_for_const_vector_p (mode, x))
6001 return gen_const_vec_duplicate (mode, x);
6002 return gen_rtx_VEC_DUPLICATE (mode, x);
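/* A minimal sketch, assuming the target provides V4SImode:

   rtx ones = gen_const_vec_duplicate (V4SImode, const1_rtx);

   yields the constant vector {1, 1, 1, 1}, encoded as a single pattern
   with one element per pattern.  For a non-constant element,
   gen_vec_duplicate falls back to a VEC_DUPLICATE rtx instead.  */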
6005 /* A subroutine of const_vec_series_p that handles the case in which:
6007 (GET_CODE (X) == CONST_VECTOR
6008 && CONST_VECTOR_NPATTERNS (X) == 1
6009 && !CONST_VECTOR_DUPLICATE_P (X))
6011 is known to hold. */
6013 bool
6014 const_vec_series_p_1 (const_rtx x, rtx *base_out, rtx *step_out)
6016 /* Stepped sequences are only defined for integers, to avoid specifying
6017 rounding behavior. */
6018 if (GET_MODE_CLASS (GET_MODE (x)) != MODE_VECTOR_INT)
6019 return false;
6021 /* A non-duplicated vector with two elements can always be seen as a
6022 series with a nonzero step. Longer vectors must have a stepped
6023 encoding. */
6024 if (maybe_ne (CONST_VECTOR_NUNITS (x), 2)
6025 && !CONST_VECTOR_STEPPED_P (x))
6026 return false;
6028 /* Calculate the step between the first and second elements. */
6029 scalar_mode inner = GET_MODE_INNER (GET_MODE (x));
6030 rtx base = CONST_VECTOR_ELT (x, 0);
6031 rtx step = simplify_binary_operation (MINUS, inner,
6032 CONST_VECTOR_ENCODED_ELT (x, 1), base);
6033 if (rtx_equal_p (step, CONST0_RTX (inner)))
6034 return false;
6036 /* If we have a stepped encoding, check that the step between the
6037 second and third elements is the same as STEP. */
6038 if (CONST_VECTOR_STEPPED_P (x))
6040 rtx diff = simplify_binary_operation (MINUS, inner,
6041 CONST_VECTOR_ENCODED_ELT (x, 2),
6042 CONST_VECTOR_ENCODED_ELT (x, 1));
6043 if (!rtx_equal_p (step, diff))
6044 return false;
6047 *base_out = base;
6048 *step_out = step;
6049 return true;
6052 /* Generate a vector constant of mode MODE in which element I has
6053 the value BASE + I * STEP. */
6056 gen_const_vec_series (machine_mode mode, rtx base, rtx step)
6058 gcc_assert (valid_for_const_vector_p (mode, base)
6059 && valid_for_const_vector_p (mode, step));
6061 rtx_vector_builder builder (mode, 1, 3);
6062 builder.quick_push (base);
6063 for (int i = 1; i < 3; ++i)
6064 builder.quick_push (simplify_gen_binary (PLUS, GET_MODE_INNER (mode),
6065 builder[i - 1], step));
6066 return builder.build ();
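/* A minimal sketch, assuming the target provides V4SImode:

   rtx series = gen_const_vec_series (V4SImode, const0_rtx, GEN_INT (2));

   builds {0, 2, 4, 6}; only the leading elements {0, 2, 4} are encoded
   (one pattern, three elements per pattern) and the remaining elements
   follow from the step.  */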
6069 /* Generate a vector of mode MODE in which element I has the value
6070 BASE + I * STEP. The result will be a constant if BASE and STEP
6071 are both constants. */
6074 gen_vec_series (machine_mode mode, rtx base, rtx step)
6076 if (step == const0_rtx)
6077 return gen_vec_duplicate (mode, base);
6078 if (valid_for_const_vector_p (mode, base)
6079 && valid_for_const_vector_p (mode, step))
6080 return gen_const_vec_series (mode, base, step);
6081 return gen_rtx_VEC_SERIES (mode, base, step);
6084 /* Generate a new vector constant for mode MODE and constant value
6085 CONSTANT. */
6087 static rtx
6088 gen_const_vector (machine_mode mode, int constant)
6090 machine_mode inner = GET_MODE_INNER (mode);
6092 gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));
6094 rtx el = const_tiny_rtx[constant][(int) inner];
6095 gcc_assert (el);
6097 return gen_const_vec_duplicate (mode, el);
6100 /* Generate a vector like gen_rtx_raw_CONST_VEC, but use the zero vector when
6101 all elements are zero, and the one vector when all elements are one. */
6103 gen_rtx_CONST_VECTOR (machine_mode mode, rtvec v)
6105 gcc_assert (known_eq (GET_MODE_NUNITS (mode), GET_NUM_ELEM (v)));
6107 /* If the values are all the same, check to see if we can use one of the
6108 standard constant vectors. */
6109 if (rtvec_all_equal_p (v))
6110 return gen_const_vec_duplicate (mode, RTVEC_ELT (v, 0));
6112 unsigned int nunits = GET_NUM_ELEM (v);
6113 rtx_vector_builder builder (mode, nunits, 1);
6114 for (unsigned int i = 0; i < nunits; ++i)
6115 builder.quick_push (RTVEC_ELT (v, i));
6116 return builder.build (v);
6119 /* Initialise global register information required by all functions. */
6121 void
6122 init_emit_regs (void)
6124 int i;
6125 machine_mode mode;
6126 mem_attrs *attrs;
6128 /* Reset register attributes */
6129 reg_attrs_htab->empty ();
6131 /* We need reg_raw_mode, so initialize the modes now. */
6132 init_reg_modes_target ();
6134 /* Assign register numbers to the globally defined register rtx. */
6135 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
6136 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
6137 hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
6138 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
6139 virtual_incoming_args_rtx =
6140 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
6141 virtual_stack_vars_rtx =
6142 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
6143 virtual_stack_dynamic_rtx =
6144 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
6145 virtual_outgoing_args_rtx =
6146 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
6147 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
6148 virtual_preferred_stack_boundary_rtx =
6149 gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);
6151 /* Initialize RTL for commonly used hard registers. These are
6152 copied into regno_reg_rtx as we begin to compile each function. */
6153 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
6154 initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
6156 #ifdef RETURN_ADDRESS_POINTER_REGNUM
6157 return_address_pointer_rtx
6158 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
6159 #endif
6161 pic_offset_table_rtx = NULL_RTX;
6162 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
6163 pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
6165 for (i = 0; i < (int) MAX_MACHINE_MODE; i++)
6167 mode = (machine_mode) i;
6168 attrs = ggc_cleared_alloc<mem_attrs> ();
6169 attrs->align = BITS_PER_UNIT;
6170 attrs->addrspace = ADDR_SPACE_GENERIC;
6171 if (mode != BLKmode && mode != VOIDmode)
6173 attrs->size_known_p = true;
6174 attrs->size = GET_MODE_SIZE (mode);
6175 if (STRICT_ALIGNMENT)
6176 attrs->align = GET_MODE_ALIGNMENT (mode);
6178 mode_mem_attrs[i] = attrs;
6181 split_branch_probability = profile_probability::uninitialized ();
6184 /* Initialize global machine_mode variables. */
6186 void
6187 init_derived_machine_modes (void)
6189 opt_scalar_int_mode mode_iter, opt_byte_mode, opt_word_mode;
6190 FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_INT)
6192 scalar_int_mode mode = mode_iter.require ();
6194 if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
6195 && !opt_byte_mode.exists ())
6196 opt_byte_mode = mode;
6198 if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
6199 && !opt_word_mode.exists ())
6200 opt_word_mode = mode;
6203 byte_mode = opt_byte_mode.require ();
6204 word_mode = opt_word_mode.require ();
6205 ptr_mode = as_a <scalar_int_mode>
6206 (mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0).require ());
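/* A hedged example (kept under "#if 0", not compiled) of the invariants the
   code above establishes for the derived modes.  */
#if 0
static void
example_derived_mode_invariants (void)
{
  gcc_checking_assert (GET_MODE_BITSIZE (byte_mode) == BITS_PER_UNIT);
  gcc_checking_assert (GET_MODE_BITSIZE (word_mode) == BITS_PER_WORD);
  /* ptr_mode is chosen purely by POINTER_SIZE; it usually equals Pmode,
     but that is not guaranteed.  */
  gcc_checking_assert (GET_MODE_PRECISION (ptr_mode) == POINTER_SIZE);
}
#endif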
6209 /* Create some permanent unique rtl objects shared between all functions. */
6211 void
6212 init_emit_once (void)
6214 int i;
6215 machine_mode mode;
6216 scalar_float_mode double_mode;
6217 opt_scalar_mode smode_iter;
6219 /* Initialize the CONST_INT, CONST_WIDE_INT, CONST_DOUBLE, CONST_POLY_INT,
6220 CONST_FIXED, and register attribute hash tables. */
6221 const_int_htab = hash_table<const_int_hasher>::create_ggc (37);
6223 #if TARGET_SUPPORTS_WIDE_INT
6224 const_wide_int_htab = hash_table<const_wide_int_hasher>::create_ggc (37);
6225 #endif
6226 const_double_htab = hash_table<const_double_hasher>::create_ggc (37);
6228 if (NUM_POLY_INT_COEFFS > 1)
6229 const_poly_int_htab = hash_table<const_poly_int_hasher>::create_ggc (37);
6231 const_fixed_htab = hash_table<const_fixed_hasher>::create_ggc (37);
6233 reg_attrs_htab = hash_table<reg_attr_hasher>::create_ggc (37);
6235 #ifdef INIT_EXPANDERS
6236 /* This initializes {init|mark|free}_machine_status before the first
6237 call to push_function_context_to. It is needed by the Chill front
6238 end, which calls push_function_context_to before the first call to
6239 init_function_start. */
6240 INIT_EXPANDERS;
6241 #endif
6243 /* Create the unique rtx's for certain rtx codes and operand values. */
6245 /* Process stack-limiting command-line options. */
6246 if (opt_fstack_limit_symbol_arg != NULL)
6247 stack_limit_rtx
6248 = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (opt_fstack_limit_symbol_arg));
6249 if (opt_fstack_limit_register_no >= 0)
6250 stack_limit_rtx = gen_rtx_REG (Pmode, opt_fstack_limit_register_no);
6252 /* Don't use gen_rtx_CONST_INT here, since in this case it would try to
6253 use the very variables being initialized. */
6254 for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
6255 const_int_rtx[i + MAX_SAVED_CONST_INT] =
6256 gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);
6258 if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
6259 && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
6260 const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
6261 else
6262 const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
6264 double_mode = float_mode_for_size (DOUBLE_TYPE_SIZE).require ();
6266 real_from_integer (&dconst0, double_mode, 0, SIGNED);
6267 real_from_integer (&dconst1, double_mode, 1, SIGNED);
6268 real_from_integer (&dconst2, double_mode, 2, SIGNED);
6270 dconstm1 = dconst1;
6271 dconstm1.sign = 1;
6273 dconsthalf = dconst1;
6274 SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);
6276 for (i = 0; i < 3; i++)
6278 const REAL_VALUE_TYPE *const r =
6279 (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
6281 FOR_EACH_MODE_IN_CLASS (mode, MODE_FLOAT)
6282 const_tiny_rtx[i][(int) mode] =
6283 const_double_from_real_value (*r, mode);
6285 FOR_EACH_MODE_IN_CLASS (mode, MODE_DECIMAL_FLOAT)
6286 const_tiny_rtx[i][(int) mode] =
6287 const_double_from_real_value (*r, mode);
6289 const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
6291 FOR_EACH_MODE_IN_CLASS (mode, MODE_INT)
6292 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
6294 for (mode = MIN_MODE_PARTIAL_INT;
6295 mode <= MAX_MODE_PARTIAL_INT;
6296 mode = (machine_mode)((int)(mode) + 1))
6297 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
6300 const_tiny_rtx[3][(int) VOIDmode] = constm1_rtx;
6302 FOR_EACH_MODE_IN_CLASS (mode, MODE_INT)
6303 const_tiny_rtx[3][(int) mode] = constm1_rtx;
6305 /* For BImode, 1 and -1 are unsigned and signed interpretations
6306 of the same value. */
6307 const_tiny_rtx[0][(int) BImode] = const0_rtx;
6308 const_tiny_rtx[1][(int) BImode] = const_true_rtx;
6309 const_tiny_rtx[3][(int) BImode] = const_true_rtx;
6311 for (mode = MIN_MODE_PARTIAL_INT;
6312 mode <= MAX_MODE_PARTIAL_INT;
6313 mode = (machine_mode)((int)(mode) + 1))
6314 const_tiny_rtx[3][(int) mode] = constm1_rtx;
6316 FOR_EACH_MODE_IN_CLASS (mode, MODE_COMPLEX_INT)
6318 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
6319 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
6322 FOR_EACH_MODE_IN_CLASS (mode, MODE_COMPLEX_FLOAT)
6324 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
6325 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
6328 /* As for BImode, "all 1" and "all -1" are unsigned and signed
6329 interpretations of the same value. */
6330 FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_BOOL)
6332 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6333 const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
6334 const_tiny_rtx[1][(int) mode] = const_tiny_rtx[3][(int) mode];
6337 FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_INT)
6339 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6340 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6341 const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
6344 FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_FLOAT)
6346 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6347 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6350 FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_FRACT)
6352 scalar_mode smode = smode_iter.require ();
6353 FCONST0 (smode).data.high = 0;
6354 FCONST0 (smode).data.low = 0;
6355 FCONST0 (smode).mode = smode;
6356 const_tiny_rtx[0][(int) smode]
6357 = CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);
6360 FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_UFRACT)
6362 scalar_mode smode = smode_iter.require ();
6363 FCONST0 (smode).data.high = 0;
6364 FCONST0 (smode).data.low = 0;
6365 FCONST0 (smode).mode = smode;
6366 const_tiny_rtx[0][(int) smode]
6367 = CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);
6370 FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_ACCUM)
6372 scalar_mode smode = smode_iter.require ();
6373 FCONST0 (smode).data.high = 0;
6374 FCONST0 (smode).data.low = 0;
6375 FCONST0 (smode).mode = smode;
6376 const_tiny_rtx[0][(int) smode]
6377 = CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);
6379 /* We store the value 1. */
6380 FCONST1 (smode).data.high = 0;
6381 FCONST1 (smode).data.low = 0;
6382 FCONST1 (smode).mode = smode;
6383 FCONST1 (smode).data
6384 = double_int_one.lshift (GET_MODE_FBIT (smode),
6385 HOST_BITS_PER_DOUBLE_INT,
6386 SIGNED_FIXED_POINT_MODE_P (smode));
6387 const_tiny_rtx[1][(int) smode]
6388 = CONST_FIXED_FROM_FIXED_VALUE (FCONST1 (smode), smode);
6391 FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_UACCUM)
6393 scalar_mode smode = smode_iter.require ();
6394 FCONST0 (smode).data.high = 0;
6395 FCONST0 (smode).data.low = 0;
6396 FCONST0 (smode).mode = smode;
6397 const_tiny_rtx[0][(int) smode]
6398 = CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);
6400 /* We store the value 1. */
6401 FCONST1 (smode).data.high = 0;
6402 FCONST1 (smode).data.low = 0;
6403 FCONST1 (smode).mode = smode;
6404 FCONST1 (smode).data
6405 = double_int_one.lshift (GET_MODE_FBIT (smode),
6406 HOST_BITS_PER_DOUBLE_INT,
6407 SIGNED_FIXED_POINT_MODE_P (smode));
6408 const_tiny_rtx[1][(int) smode]
6409 = CONST_FIXED_FROM_FIXED_VALUE (FCONST1 (smode), smode);
6412 FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_FRACT)
6414 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6417 FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_UFRACT)
6419 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6422 FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_ACCUM)
6424 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6425 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6428 FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_UACCUM)
6430 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6431 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6434 for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
6435 if (GET_MODE_CLASS ((machine_mode) i) == MODE_CC)
6436 const_tiny_rtx[0][i] = const0_rtx;
6438 pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
6439 ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
6440 simple_return_rtx = gen_rtx_fmt_ (SIMPLE_RETURN, VOIDmode);
6441 cc0_rtx = gen_rtx_fmt_ (CC0, VOIDmode);
6442 invalid_insn_rtx = gen_rtx_INSN (VOIDmode,
6443 /*prev_insn=*/NULL,
6444 /*next_insn=*/NULL,
6445 /*bb=*/NULL,
6446 /*pattern=*/NULL_RTX,
6447 /*location=*/-1,
6448 CODE_FOR_nothing,
6449 /*reg_notes=*/NULL_RTX);
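/* Illustrative sketch (kept under "#if 0", not compiled): the tables built
   above make the tiny constants unique, so they can be compared by pointer
   rather than with rtx_equal_p.  */
#if 0
static void
example_shared_constants (void)
{
  /* GEN_INT reuses the cached CONST_INTs in the saved range.  */
  gcc_checking_assert (GEN_INT (0) == const0_rtx);
  gcc_checking_assert (GEN_INT (-1) == constm1_rtx);
  /* CONST0_RTX of an integer mode is the same object as const0_rtx.  */
  gcc_checking_assert (CONST0_RTX (word_mode) == const0_rtx);
}
#endif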
6452 /* Produce an exact duplicate of insn INSN after AFTER.
6453 Take care to update libcall regions if present. */
6455 rtx_insn *
6456 emit_copy_of_insn_after (rtx_insn *insn, rtx_insn *after)
6458 rtx_insn *new_rtx;
6459 rtx link;
6461 switch (GET_CODE (insn))
6463 case INSN:
6464 new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
6465 break;
6467 case JUMP_INSN:
6468 new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
6469 CROSSING_JUMP_P (new_rtx) = CROSSING_JUMP_P (insn);
6470 break;
6472 case DEBUG_INSN:
6473 new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
6474 break;
6476 case CALL_INSN:
6477 new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
6478 if (CALL_INSN_FUNCTION_USAGE (insn))
6479 CALL_INSN_FUNCTION_USAGE (new_rtx)
6480 = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
6481 SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
6482 RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
6483 RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
6484 RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
6485 = RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
6486 break;
6488 default:
6489 gcc_unreachable ();
6492 /* Update LABEL_NUSES. */
6493 mark_jump_label (PATTERN (new_rtx), new_rtx, 0);
6495 INSN_LOCATION (new_rtx) = INSN_LOCATION (insn);
6497 /* If the old insn is frame related, then so is the new one. This is
6498 primarily needed for IA-64 unwind info, which marks epilogue insns
6499 that may be duplicated by the basic block reordering code. */
6500 RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);
6502 /* Locate the end of existing REG_NOTES in NEW_RTX. */
6503 rtx *ptail = &REG_NOTES (new_rtx);
6504 while (*ptail != NULL_RTX)
6505 ptail = &XEXP (*ptail, 1);
6507 /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
6508 will make them. REG_LABEL_TARGETs are created there too, but are
6509 supposed to be sticky, so we copy them. */
6510 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
6511 if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
6513 *ptail = duplicate_reg_note (link);
6514 ptail = &XEXP (*ptail, 1);
6517 INSN_CODE (new_rtx) = INSN_CODE (insn);
6518 return new_rtx;
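/* Illustrative sketch (kept under "#if 0", not compiled) of a typical call:
   duplicating an insn directly after the original.  The helper name is
   hypothetical.  */
#if 0
static rtx_insn *
example_duplicate_insn (rtx_insn *insn)
{
  rtx_insn *copy = emit_copy_of_insn_after (insn, insn);
  /* The copy inherits the location and frame-relatedness of INSN.  */
  gcc_checking_assert (INSN_LOCATION (copy) == INSN_LOCATION (insn));
  gcc_checking_assert (RTX_FRAME_RELATED_P (copy) == RTX_FRAME_RELATED_P (insn));
  return copy;
}
#endif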
6521 static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
6522 rtx
6523 gen_hard_reg_clobber (machine_mode mode, unsigned int regno)
6525 if (hard_reg_clobbers[mode][regno])
6526 return hard_reg_clobbers[mode][regno];
6527 else
6528 return (hard_reg_clobbers[mode][regno] =
6529 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
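/* A hedged example (kept under "#if 0", not compiled) of the caching above:
   repeated requests for the same (mode, regno) pair yield the identical rtx.
   Hard register 0 is an arbitrary choice for the sake of the example.  */
#if 0
static void
example_hard_reg_clobber_cache (void)
{
  rtx c1 = gen_hard_reg_clobber (word_mode, 0);
  rtx c2 = gen_hard_reg_clobber (word_mode, 0);
  gcc_checking_assert (c1 == c2);
}
#endif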
6532 static GTY((deletable)) rtx
6533 hard_reg_clobbers_high[NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
6535 /* Return a CLOBBER_HIGH expression for register REGNO that clobbers MODE,
6536 caching into HARD_REG_CLOBBERS_HIGH. */
6537 rtx
6538 gen_hard_reg_clobber_high (machine_mode mode, unsigned int regno)
6540 if (hard_reg_clobbers_high[mode][regno])
6541 return hard_reg_clobbers_high[mode][regno];
6542 else
6543 return (hard_reg_clobbers_high[mode][regno]
6544 = gen_rtx_CLOBBER_HIGH (VOIDmode, gen_rtx_REG (mode, regno)));
6547 location_t prologue_location;
6548 location_t epilogue_location;
6550 /* Hold the current location information and the last location information,
6551 so that the data structures are built lazily only when instructions in a
6552 given place are needed. */
6553 static location_t curr_location;
6555 /* Allocate the insn location data structure. */
6556 void
6557 insn_locations_init (void)
6559 prologue_location = epilogue_location = 0;
6560 curr_location = UNKNOWN_LOCATION;
6563 /* At the end of the emit stage, clear the current location. */
6564 void
6565 insn_locations_finalize (void)
6567 epilogue_location = curr_location;
6568 curr_location = UNKNOWN_LOCATION;
6571 /* Set current location. */
6572 void
6573 set_curr_insn_location (location_t location)
6575 curr_location = location;
6578 /* Get current location. */
6579 location_t
6580 curr_insn_location (void)
6582 return curr_location;
6585 /* Set the location of the insn chain starting at INSN to LOC. */
6586 void
6587 set_insn_locations (rtx_insn *insn, location_t loc)
6589 while (insn)
6591 if (INSN_P (insn))
6592 INSN_LOCATION (insn) = loc;
6593 insn = NEXT_INSN (insn);
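/* Illustrative sketch (kept under "#if 0", not compiled) of the usual
   expansion-time pattern: record the source location of the gimple statement
   being expanded, then stamp a generated insn chain with it.  */
#if 0
static void
example_stamp_sequence (gimple *stmt, rtx_insn *seq)
{
  set_curr_insn_location (gimple_location (stmt));
  /* ... emit or transform insns ...  */
  set_insn_locations (seq, curr_insn_location ());
}
#endif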
6597 /* Return the lexical scope block that INSN belongs to. */
6598 tree
6599 insn_scope (const rtx_insn *insn)
6601 return LOCATION_BLOCK (INSN_LOCATION (insn));
6604 /* Return line number of the statement that produced this insn. */
6605 int
6606 insn_line (const rtx_insn *insn)
6608 return LOCATION_LINE (INSN_LOCATION (insn));
6611 /* Return source file of the statement that produced this insn. */
6612 const char *
6613 insn_file (const rtx_insn *insn)
6615 return LOCATION_FILE (INSN_LOCATION (insn));
6618 /* Return expanded location of the statement that produced this insn. */
6619 expanded_location
6620 insn_location (const rtx_insn *insn)
6622 return expand_location (INSN_LOCATION (insn));
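/* A hedged example (kept under "#if 0", not compiled) of using the accessors
   above as a quick debugging aid.  */
#if 0
static void
example_note_insn_position (rtx_insn *insn)
{
  if (insn_file (insn))
    fprintf (stderr, "insn %d at %s:%d\n",
             INSN_UID (insn), insn_file (insn), insn_line (insn));
}
#endif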
6625 /* Return true if memory model MODEL requires a pre-operation (release-style)
6626 barrier or a post-operation (acquire-style) barrier. While not universal,
6627 this function matches the behavior of several targets. */
6629 bool
6630 need_atomic_barrier_p (enum memmodel model, bool pre)
6632 switch (model & MEMMODEL_BASE_MASK)
6634 case MEMMODEL_RELAXED:
6635 case MEMMODEL_CONSUME:
6636 return false;
6637 case MEMMODEL_RELEASE:
6638 return pre;
6639 case MEMMODEL_ACQUIRE:
6640 return !pre;
6641 case MEMMODEL_ACQ_REL:
6642 case MEMMODEL_SEQ_CST:
6643 return true;
6644 default:
6645 gcc_unreachable ();
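/* Illustrative sketch (kept under "#if 0", not compiled): how a target might
   bracket a plain move with fences to honor MODEL.  It assumes the caller's
   translation unit includes optabs.h for expand_mem_thread_fence.  */
#if 0
static void
example_expand_atomic_move (rtx target, rtx mem, enum memmodel model)
{
  if (need_atomic_barrier_p (model, /*pre=*/true))
    expand_mem_thread_fence (model);
  emit_move_insn (target, mem);
  if (need_atomic_barrier_p (model, /*pre=*/false))
    expand_mem_thread_fence (model);
}
#endif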
6649 /* Return a constant shift amount for shifting a value of mode MODE
6650 by VALUE bits. */
6652 rtx
6653 gen_int_shift_amount (machine_mode, poly_int64 value)
6655 /* Use a 64-bit mode, to avoid any truncation.
6657 ??? Perhaps this should be automatically derived from the .md files
6658 instead, or perhaps have a target hook. */
6659 scalar_int_mode shift_mode = (BITS_PER_UNIT == 8
6660 ? DImode
6661 : int_mode_for_size (64, 0).require ());
6662 return gen_int_mode (value, shift_mode);
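/* A hedged example (kept under "#if 0", not compiled) of the intended use:
   build the shift count with gen_int_shift_amount rather than GEN_INT so
   that it gets a wide enough mode.  */
#if 0
static rtx
example_shift_left (rtx op, int bits)
{
  machine_mode mode = GET_MODE (op);
  return gen_rtx_ASHIFT (mode, op, gen_int_shift_amount (mode, bits));
}
#endif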
6665 /* Initialize fields of rtl_data related to stack alignment. */
6667 void
6668 rtl_data::init_stack_alignment ()
6670 stack_alignment_needed = STACK_BOUNDARY;
6671 max_used_stack_slot_alignment = STACK_BOUNDARY;
6672 stack_alignment_estimated = 0;
6673 preferred_stack_boundary = STACK_BOUNDARY;
6677 #include "gt-emit-rtl.h"