gcc/emit-rtl.c (blob 4e5ba41565b0679ca685d13449b0426aec628154)
1 /* Emit RTL for the GCC expander.
2 Copyright (C) 1987-2016 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
21 /* Middle-to-low level generation of rtx code and insns.
23 This file contains support functions for creating rtl expressions
24 and manipulating them in the doubly-linked chain of insns.
26 The patterns of the insns are created by machine-dependent
27 routines in insn-emit.c, which is generated automatically from
28 the machine description. These routines make the individual rtx's
29 of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
30 which are automatically generated from rtl.def; what is machine
31 dependent is the kind of rtx's they make and what arguments they
32 use. */
34 #include "config.h"
35 #include "system.h"
36 #include "coretypes.h"
37 #include "backend.h"
38 #include "target.h"
39 #include "rtl.h"
40 #include "tree.h"
41 #include "df.h"
42 #include "tm_p.h"
43 #include "stringpool.h"
44 #include "insn-config.h"
45 #include "regs.h"
46 #include "emit-rtl.h"
47 #include "recog.h"
48 #include "diagnostic-core.h"
49 #include "alias.h"
50 #include "fold-const.h"
51 #include "varasm.h"
52 #include "cfgrtl.h"
53 #include "tree-eh.h"
54 #include "explow.h"
55 #include "expr.h"
56 #include "params.h"
57 #include "builtins.h"
58 #include "rtl-iter.h"
59 #include "stor-layout.h"
60 #include "opts.h"
62 struct target_rtl default_target_rtl;
63 #if SWITCHABLE_TARGET
64 struct target_rtl *this_target_rtl = &default_target_rtl;
65 #endif
67 #define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)
69 /* Commonly used modes. */
71 machine_mode byte_mode; /* Mode whose width is BITS_PER_UNIT. */
72 machine_mode word_mode; /* Mode whose width is BITS_PER_WORD. */
73 machine_mode double_mode; /* Mode whose width is DOUBLE_TYPE_SIZE. */
74 machine_mode ptr_mode; /* Mode whose width is POINTER_SIZE. */
76 /* Datastructures maintained for currently processed function in RTL form. */
78 struct rtl_data x_rtl;
80 /* Indexed by pseudo register number, gives the rtx for that pseudo.
81 Allocated in parallel with regno_pointer_align.
82 FIXME: We could put it into the emit_status struct, but gengtype is not able
83 to deal with a length attribute nested in top-level structures. */
85 rtx * regno_reg_rtx;
87 /* This is *not* reset after each function. It gives each CODE_LABEL
88 in the entire compilation a unique label number. */
90 static GTY(()) int label_num = 1;
92 /* We record floating-point CONST_DOUBLEs in each floating-point mode for
93 the values of 0, 1, and 2. For the integer entries and VOIDmode, we
94 record a copy of const[012]_rtx and constm1_rtx. CONSTM1_RTX
95 is set only for MODE_INT and MODE_VECTOR_INT modes. */
97 rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE];
99 rtx const_true_rtx;
101 REAL_VALUE_TYPE dconst0;
102 REAL_VALUE_TYPE dconst1;
103 REAL_VALUE_TYPE dconst2;
104 REAL_VALUE_TYPE dconstm1;
105 REAL_VALUE_TYPE dconsthalf;
107 /* Record fixed-point constants 0 and 1. */
108 FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
109 FIXED_VALUE_TYPE fconst1[MAX_FCONST1];
111 /* We make one copy of (const_int C) where C is in
112 [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
113 to save space during the compilation and simplify comparisons of
114 integers. */
116 rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
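/* Editor's note -- an illustrative sketch, not part of the original source:
   CONST_INTs in [-MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT] are shared, so
   repeated requests for the same small value return the same rtx:

     rtx a = GEN_INT (2);
     rtx b = gen_rtx_CONST_INT (VOIDmode, 2);
     (a == b -- both point at const_int_rtx[2 + MAX_SAVED_CONST_INT].)

   Values outside that range are looked up in const_int_htab instead.  */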
118 /* Standard pieces of rtx, to be substituted directly into things. */
119 rtx pc_rtx;
120 rtx ret_rtx;
121 rtx simple_return_rtx;
122 rtx cc0_rtx;
124 /* Marker used for denoting an INSN, which should never be accessed (i.e.,
125 this pointer should normally never be dereferenced), but is required to be
126 distinct from NULL_RTX. Currently used by the peephole2 pass. */
127 rtx_insn *invalid_insn_rtx;
129 /* A hash table storing CONST_INTs whose absolute value is greater
130 than MAX_SAVED_CONST_INT. */
132 struct const_int_hasher : ggc_cache_ptr_hash<rtx_def>
134 typedef HOST_WIDE_INT compare_type;
136 static hashval_t hash (rtx i);
137 static bool equal (rtx i, HOST_WIDE_INT h);
140 static GTY ((cache)) hash_table<const_int_hasher> *const_int_htab;
142 struct const_wide_int_hasher : ggc_cache_ptr_hash<rtx_def>
144 static hashval_t hash (rtx x);
145 static bool equal (rtx x, rtx y);
148 static GTY ((cache)) hash_table<const_wide_int_hasher> *const_wide_int_htab;
150 /* A hash table storing register attribute structures. */
151 struct reg_attr_hasher : ggc_cache_ptr_hash<reg_attrs>
153 static hashval_t hash (reg_attrs *x);
154 static bool equal (reg_attrs *a, reg_attrs *b);
157 static GTY ((cache)) hash_table<reg_attr_hasher> *reg_attrs_htab;
159 /* A hash table storing all CONST_DOUBLEs. */
160 struct const_double_hasher : ggc_cache_ptr_hash<rtx_def>
162 static hashval_t hash (rtx x);
163 static bool equal (rtx x, rtx y);
166 static GTY ((cache)) hash_table<const_double_hasher> *const_double_htab;
168 /* A hash table storing all CONST_FIXEDs. */
169 struct const_fixed_hasher : ggc_cache_ptr_hash<rtx_def>
171 static hashval_t hash (rtx x);
172 static bool equal (rtx x, rtx y);
175 static GTY ((cache)) hash_table<const_fixed_hasher> *const_fixed_htab;
177 #define cur_insn_uid (crtl->emit.x_cur_insn_uid)
178 #define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
179 #define first_label_num (crtl->emit.x_first_label_num)
181 static void set_used_decls (tree);
182 static void mark_label_nuses (rtx);
183 #if TARGET_SUPPORTS_WIDE_INT
184 static rtx lookup_const_wide_int (rtx);
185 #endif
186 static rtx lookup_const_double (rtx);
187 static rtx lookup_const_fixed (rtx);
188 static reg_attrs *get_reg_attrs (tree, int);
189 static rtx gen_const_vector (machine_mode, int);
190 static void copy_rtx_if_shared_1 (rtx *orig);
192 /* Probability of the conditional branch currently being processed by try_split.
193 Set to -1 otherwise. */
194 int split_branch_probability = -1;
196 /* Returns a hash code for X (which is really a CONST_INT). */
198 hashval_t
199 const_int_hasher::hash (rtx x)
201 return (hashval_t) INTVAL (x);
204 /* Returns nonzero if the value represented by X (which is really a
205 CONST_INT) is the same as that given by Y (which is really a
206 HOST_WIDE_INT *). */
208 bool
209 const_int_hasher::equal (rtx x, HOST_WIDE_INT y)
211 return (INTVAL (x) == y);
214 #if TARGET_SUPPORTS_WIDE_INT
215 /* Returns a hash code for X (which is really a CONST_WIDE_INT). */
217 hashval_t
218 const_wide_int_hasher::hash (rtx x)
220 int i;
221 unsigned HOST_WIDE_INT hash = 0;
222 const_rtx xr = x;
224 for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
225 hash += CONST_WIDE_INT_ELT (xr, i);
227 return (hashval_t) hash;
230 /* Returns nonzero if the value represented by X (which is really a
231 CONST_WIDE_INT) is the same as that given by Y (which is really a
232 CONST_WIDE_INT). */
234 bool
235 const_wide_int_hasher::equal (rtx x, rtx y)
237 int i;
238 const_rtx xr = x;
239 const_rtx yr = y;
240 if (CONST_WIDE_INT_NUNITS (xr) != CONST_WIDE_INT_NUNITS (yr))
241 return false;
243 for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
244 if (CONST_WIDE_INT_ELT (xr, i) != CONST_WIDE_INT_ELT (yr, i))
245 return false;
247 return true;
249 #endif
251 /* Returns a hash code for X (which is really a CONST_DOUBLE). */
252 hashval_t
253 const_double_hasher::hash (rtx x)
255 const_rtx const value = x;
256 hashval_t h;
258 if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (value) == VOIDmode)
259 h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
260 else
262 h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
263 /* MODE is used in the comparison, so it should be in the hash. */
264 h ^= GET_MODE (value);
266 return h;
269 /* Returns nonzero if the value represented by X (really a ...)
270 is the same as that represented by Y (really a ...) */
271 bool
272 const_double_hasher::equal (rtx x, rtx y)
274 const_rtx const a = x, b = y;
276 if (GET_MODE (a) != GET_MODE (b))
277 return 0;
278 if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (a) == VOIDmode)
279 return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
280 && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
281 else
282 return real_identical (CONST_DOUBLE_REAL_VALUE (a),
283 CONST_DOUBLE_REAL_VALUE (b));
286 /* Returns a hash code for X (which is really a CONST_FIXED). */
288 hashval_t
289 const_fixed_hasher::hash (rtx x)
291 const_rtx const value = x;
292 hashval_t h;
294 h = fixed_hash (CONST_FIXED_VALUE (value));
295 /* MODE is used in the comparison, so it should be in the hash. */
296 h ^= GET_MODE (value);
297 return h;
300 /* Returns nonzero if the value represented by X is the same as that
301 represented by Y. */
303 bool
304 const_fixed_hasher::equal (rtx x, rtx y)
306 const_rtx const a = x, b = y;
308 if (GET_MODE (a) != GET_MODE (b))
309 return 0;
310 return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
313 /* Return true if the given memory attributes are equal. */
315 bool
316 mem_attrs_eq_p (const struct mem_attrs *p, const struct mem_attrs *q)
318 if (p == q)
319 return true;
320 if (!p || !q)
321 return false;
322 return (p->alias == q->alias
323 && p->offset_known_p == q->offset_known_p
324 && (!p->offset_known_p || p->offset == q->offset)
325 && p->size_known_p == q->size_known_p
326 && (!p->size_known_p || p->size == q->size)
327 && p->align == q->align
328 && p->addrspace == q->addrspace
329 && (p->expr == q->expr
330 || (p->expr != NULL_TREE && q->expr != NULL_TREE
331 && operand_equal_p (p->expr, q->expr, 0))));
334 /* Set MEM's memory attributes so that they are the same as ATTRS. */
336 static void
337 set_mem_attrs (rtx mem, mem_attrs *attrs)
339 /* If everything is the default, we can just clear the attributes. */
340 if (mem_attrs_eq_p (attrs, mode_mem_attrs[(int) GET_MODE (mem)]))
342 MEM_ATTRS (mem) = 0;
343 return;
346 if (!MEM_ATTRS (mem)
347 || !mem_attrs_eq_p (attrs, MEM_ATTRS (mem)))
349 MEM_ATTRS (mem) = ggc_alloc<mem_attrs> ();
350 memcpy (MEM_ATTRS (mem), attrs, sizeof (mem_attrs));
354 /* Returns a hash code for X (which is really a reg_attrs *). */
356 hashval_t
357 reg_attr_hasher::hash (reg_attrs *x)
359 const reg_attrs *const p = x;
361 return ((p->offset * 1000) ^ (intptr_t) p->decl);
364 /* Returns nonzero if the value represented by X is the same as that given by
365 Y. */
367 bool
368 reg_attr_hasher::equal (reg_attrs *x, reg_attrs *y)
370 const reg_attrs *const p = x;
371 const reg_attrs *const q = y;
373 return (p->decl == q->decl && p->offset == q->offset);
375 /* Allocate a new reg_attrs structure and insert it into the hash table if
376 one identical to it is not already in the table. */
379 static reg_attrs *
380 get_reg_attrs (tree decl, int offset)
382 reg_attrs attrs;
384 /* If everything is the default, we can just return zero. */
385 if (decl == 0 && offset == 0)
386 return 0;
388 attrs.decl = decl;
389 attrs.offset = offset;
391 reg_attrs **slot = reg_attrs_htab->find_slot (&attrs, INSERT);
392 if (*slot == 0)
394 *slot = ggc_alloc<reg_attrs> ();
395 memcpy (*slot, &attrs, sizeof (reg_attrs));
398 return *slot;
402 #if !HAVE_blockage
403 /* Generate an empty ASM_INPUT, which is used to block attempts to schedule,
404 and to prevent register equivalences from being seen across this insn. */
407 gen_blockage (void)
409 rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
410 MEM_VOLATILE_P (x) = true;
411 return x;
413 #endif
416 /* Set the mode and register number of X to MODE and REGNO. */
418 void
419 set_mode_and_regno (rtx x, machine_mode mode, unsigned int regno)
421 unsigned int nregs = (HARD_REGISTER_NUM_P (regno)
422 ? hard_regno_nregs[regno][mode]
423 : 1);
424 PUT_MODE_RAW (x, mode);
425 set_regno_raw (x, regno, nregs);
428 /* Generate a new REG rtx. Make sure ORIGINAL_REGNO is set properly, and
429 don't attempt to share with the various global pieces of rtl (such as
430 frame_pointer_rtx). */
433 gen_raw_REG (machine_mode mode, unsigned int regno)
435 rtx x = rtx_alloc_stat (REG MEM_STAT_INFO);
436 set_mode_and_regno (x, mode, regno);
437 REG_ATTRS (x) = NULL;
438 ORIGINAL_REGNO (x) = regno;
439 return x;
442 /* There are some RTL codes that require special attention; the generation
443 functions do the raw handling. If you add to this list, modify
444 special_rtx in gengenrtl.c as well. */
446 rtx_expr_list *
447 gen_rtx_EXPR_LIST (machine_mode mode, rtx expr, rtx expr_list)
449 return as_a <rtx_expr_list *> (gen_rtx_fmt_ee (EXPR_LIST, mode, expr,
450 expr_list));
453 rtx_insn_list *
454 gen_rtx_INSN_LIST (machine_mode mode, rtx insn, rtx insn_list)
456 return as_a <rtx_insn_list *> (gen_rtx_fmt_ue (INSN_LIST, mode, insn,
457 insn_list));
460 rtx_insn *
461 gen_rtx_INSN (machine_mode mode, rtx_insn *prev_insn, rtx_insn *next_insn,
462 basic_block bb, rtx pattern, int location, int code,
463 rtx reg_notes)
465 return as_a <rtx_insn *> (gen_rtx_fmt_uuBeiie (INSN, mode,
466 prev_insn, next_insn,
467 bb, pattern, location, code,
468 reg_notes));
472 gen_rtx_CONST_INT (machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
474 if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
475 return const_int_rtx[arg + MAX_SAVED_CONST_INT];
477 #if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
478 if (const_true_rtx && arg == STORE_FLAG_VALUE)
479 return const_true_rtx;
480 #endif
482 /* Look up the CONST_INT in the hash table. */
483 rtx *slot = const_int_htab->find_slot_with_hash (arg, (hashval_t) arg,
484 INSERT);
485 if (*slot == 0)
486 *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);
488 return *slot;
492 gen_int_mode (HOST_WIDE_INT c, machine_mode mode)
494 return GEN_INT (trunc_int_for_mode (c, mode));
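/* Editor's note -- an illustrative sketch, not part of the original source,
   assuming QImode is 8 bits wide on the target:

     rtx x = gen_int_mode (0xff, QImode);
     (trunc_int_for_mode sign-truncates 0xff to QImode's precision, so x is
      (const_int -1) rather than (const_int 255).)

   This is why callers should prefer gen_int_mode over raw GEN_INT when the
   constant is meant to be interpreted in a particular mode.  */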
497 /* CONST_DOUBLEs might be created from pairs of integers, or from
498 REAL_VALUE_TYPEs. Also, their length is known only at run time,
499 so we cannot use gen_rtx_raw_CONST_DOUBLE. */
501 /* Determine whether REAL, a CONST_DOUBLE, already exists in the
502 hash table. If so, return its counterpart; otherwise add it
503 to the hash table and return it. */
504 static rtx
505 lookup_const_double (rtx real)
507 rtx *slot = const_double_htab->find_slot (real, INSERT);
508 if (*slot == 0)
509 *slot = real;
511 return *slot;
514 /* Return a CONST_DOUBLE rtx for a floating-point value specified by
515 VALUE in mode MODE. */
517 const_double_from_real_value (REAL_VALUE_TYPE value, machine_mode mode)
519 rtx real = rtx_alloc (CONST_DOUBLE);
520 PUT_MODE (real, mode);
522 real->u.rv = value;
524 return lookup_const_double (real);
527 /* Determine whether FIXED, a CONST_FIXED, already exists in the
528 hash table. If so, return its counterpart; otherwise add it
529 to the hash table and return it. */
531 static rtx
532 lookup_const_fixed (rtx fixed)
534 rtx *slot = const_fixed_htab->find_slot (fixed, INSERT);
535 if (*slot == 0)
536 *slot = fixed;
538 return *slot;
541 /* Return a CONST_FIXED rtx for a fixed-point value specified by
542 VALUE in mode MODE. */
545 const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, machine_mode mode)
547 rtx fixed = rtx_alloc (CONST_FIXED);
548 PUT_MODE (fixed, mode);
550 fixed->u.fv = value;
552 return lookup_const_fixed (fixed);
555 #if TARGET_SUPPORTS_WIDE_INT == 0
556 /* Constructs double_int from rtx CST. */
558 double_int
559 rtx_to_double_int (const_rtx cst)
561 double_int r;
563 if (CONST_INT_P (cst))
564 r = double_int::from_shwi (INTVAL (cst));
565 else if (CONST_DOUBLE_AS_INT_P (cst))
567 r.low = CONST_DOUBLE_LOW (cst);
568 r.high = CONST_DOUBLE_HIGH (cst);
570 else
571 gcc_unreachable ();
573 return r;
575 #endif
577 #if TARGET_SUPPORTS_WIDE_INT
578 /* Determine whether CONST_WIDE_INT WINT already exists in the hash table.
579 If so, return its counterpart; otherwise add it to the hash table and
580 return it. */
582 static rtx
583 lookup_const_wide_int (rtx wint)
585 rtx *slot = const_wide_int_htab->find_slot (wint, INSERT);
586 if (*slot == 0)
587 *slot = wint;
589 return *slot;
591 #endif
593 /* Return an rtx constant for V, given that the constant has mode MODE.
594 The returned rtx will be a CONST_INT if V fits, otherwise it will be
595 a CONST_DOUBLE (if !TARGET_SUPPORTS_WIDE_INT) or a CONST_WIDE_INT
596 (if TARGET_SUPPORTS_WIDE_INT). */
599 immed_wide_int_const (const wide_int_ref &v, machine_mode mode)
601 unsigned int len = v.get_len ();
602 unsigned int prec = GET_MODE_PRECISION (mode);
604 /* Allow truncation but not extension since we do not know if the
605 number is signed or unsigned. */
606 gcc_assert (prec <= v.get_precision ());
608 if (len < 2 || prec <= HOST_BITS_PER_WIDE_INT)
609 return gen_int_mode (v.elt (0), mode);
611 #if TARGET_SUPPORTS_WIDE_INT
613 unsigned int i;
614 rtx value;
615 unsigned int blocks_needed
616 = (prec + HOST_BITS_PER_WIDE_INT - 1) / HOST_BITS_PER_WIDE_INT;
618 if (len > blocks_needed)
619 len = blocks_needed;
621 value = const_wide_int_alloc (len);
623 /* It is so tempting to just put the mode in here. Must control
624 myself ... */
625 PUT_MODE (value, VOIDmode);
626 CWI_PUT_NUM_ELEM (value, len);
628 for (i = 0; i < len; i++)
629 CONST_WIDE_INT_ELT (value, i) = v.elt (i);
631 return lookup_const_wide_int (value);
633 #else
634 return immed_double_const (v.elt (0), v.elt (1), mode);
635 #endif
638 #if TARGET_SUPPORTS_WIDE_INT == 0
639 /* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
640 of ints: I0 is the low-order word and I1 is the high-order word.
641 For values that are larger than HOST_BITS_PER_DOUBLE_INT, the
642 implied upper bits are copies of the high bit of i1. The value
643 itself is neither signed nor unsigned. Do not use this routine for
644 non-integer modes; convert to REAL_VALUE_TYPE and use
645 const_double_from_real_value. */
648 immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, machine_mode mode)
650 rtx value;
651 unsigned int i;
653 /* There are the following cases (note that there are no modes with
654 HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < HOST_BITS_PER_DOUBLE_INT):
656 1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
657 gen_int_mode.
658 2) If the value of the integer fits into HOST_WIDE_INT anyway
659 (i.e., i1 consists only of copies of the sign bit, and the signs
660 of i0 and i1 are the same), then we return a CONST_INT for i0.
661 3) Otherwise, we create a CONST_DOUBLE for i0 and i1. */
662 if (mode != VOIDmode)
664 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
665 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
666 /* We can get a 0 for an error mark. */
667 || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
668 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
669 || GET_MODE_CLASS (mode) == MODE_POINTER_BOUNDS);
671 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
672 return gen_int_mode (i0, mode);
675 /* If this integer fits in one word, return a CONST_INT. */
676 if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
677 return GEN_INT (i0);
679 /* We use VOIDmode for integers. */
680 value = rtx_alloc (CONST_DOUBLE);
681 PUT_MODE (value, VOIDmode);
683 CONST_DOUBLE_LOW (value) = i0;
684 CONST_DOUBLE_HIGH (value) = i1;
686 for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
687 XWINT (value, i) = 0;
689 return lookup_const_double (value);
691 #endif
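/* Editor's note -- an illustrative sketch, not part of the original source,
   for the !TARGET_SUPPORTS_WIDE_INT path above (a 64-bit HOST_WIDE_INT host
   and a target providing TImode are assumed):

     rtx a = immed_double_const (42, 0, DImode);
     (DImode fits in a HOST_WIDE_INT, so case 1 applies and the result is
      (const_int 42).)

     rtx b = immed_double_const (0, 1, TImode);
     (The high word is significant, so case 3 applies and the result is a
      VOIDmode CONST_DOUBLE carrying both words.)  */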
694 gen_rtx_REG (machine_mode mode, unsigned int regno)
696 /* In case the MD file explicitly references the frame pointer, have
697 all such references point to the same frame pointer. This is
698 used during frame pointer elimination to distinguish the explicit
699 references to these registers from pseudos that happened to be
700 assigned to them.
702 If we have eliminated the frame pointer or arg pointer, we will
703 be using it as a normal register, for example as a spill
704 register. In such cases, we might be accessing it in a mode that
705 is not Pmode and therefore cannot use the pre-allocated rtx.
707 Also don't do this when we are making new REGs in reload, since
708 we don't want to get confused with the real pointers. */
710 if (mode == Pmode && !reload_in_progress && !lra_in_progress)
712 if (regno == FRAME_POINTER_REGNUM
713 && (!reload_completed || frame_pointer_needed))
714 return frame_pointer_rtx;
716 if (!HARD_FRAME_POINTER_IS_FRAME_POINTER
717 && regno == HARD_FRAME_POINTER_REGNUM
718 && (!reload_completed || frame_pointer_needed))
719 return hard_frame_pointer_rtx;
720 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
721 if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
722 && regno == ARG_POINTER_REGNUM)
723 return arg_pointer_rtx;
724 #endif
725 #ifdef RETURN_ADDRESS_POINTER_REGNUM
726 if (regno == RETURN_ADDRESS_POINTER_REGNUM)
727 return return_address_pointer_rtx;
728 #endif
729 if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
730 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
731 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
732 return pic_offset_table_rtx;
733 if (regno == STACK_POINTER_REGNUM)
734 return stack_pointer_rtx;
737 #if 0
738 /* If the per-function register table has been set up, try to re-use
739 an existing entry in that table to avoid useless generation of RTL.
741 This code is disabled for now until we can fix the various backends
742 which depend on having non-shared hard registers in some cases. Long
743 term we want to re-enable this code as it can significantly cut down
744 on the amount of useless RTL that gets generated.
746 We'll also need to fix some code that runs after reload that wants to
747 set ORIGINAL_REGNO. */
749 if (cfun
750 && cfun->emit
751 && regno_reg_rtx
752 && regno < FIRST_PSEUDO_REGISTER
753 && reg_raw_mode[regno] == mode)
754 return regno_reg_rtx[regno];
755 #endif
757 return gen_raw_REG (mode, regno);
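/* Editor's note -- an illustrative sketch, not part of the original source:
   outside of reload/LRA, requests for well-known hard registers in Pmode
   return the shared global rtx rather than a fresh REG, e.g.

     rtx sp = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
     (sp == stack_pointer_rtx)

   whereas asking for the same register in a different mode falls through
   to gen_raw_REG and builds a new, unshared REG.  */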
761 gen_rtx_MEM (machine_mode mode, rtx addr)
763 rtx rt = gen_rtx_raw_MEM (mode, addr);
765 /* This field is not cleared by the mere allocation of the rtx, so
766 we clear it here. */
767 MEM_ATTRS (rt) = 0;
769 return rt;
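/* Editor's note -- an illustrative sketch, not part of the original source:

     rtx addr = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
     rtx mem  = gen_rtx_MEM (SImode, addr);
     set_mem_align (mem, GET_MODE_ALIGNMENT (SImode));

   gen_rtx_MEM deliberately starts with no memory attributes; helpers such as
   set_mem_align, set_mem_attributes and set_mem_alias_set (defined later in
   this file) fill them in.  */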
772 /* Generate a memory referring to non-trapping constant memory. */
775 gen_const_mem (machine_mode mode, rtx addr)
777 rtx mem = gen_rtx_MEM (mode, addr);
778 MEM_READONLY_P (mem) = 1;
779 MEM_NOTRAP_P (mem) = 1;
780 return mem;
783 /* Generate a MEM referring to fixed portions of the frame, e.g., register
784 save areas. */
787 gen_frame_mem (machine_mode mode, rtx addr)
789 rtx mem = gen_rtx_MEM (mode, addr);
790 MEM_NOTRAP_P (mem) = 1;
791 set_mem_alias_set (mem, get_frame_alias_set ());
792 return mem;
795 /* Generate a MEM referring to a temporary use of the stack, not part
796 of the fixed stack frame. For example, something which is pushed
797 by a target splitter. */
799 gen_tmp_stack_mem (machine_mode mode, rtx addr)
801 rtx mem = gen_rtx_MEM (mode, addr);
802 MEM_NOTRAP_P (mem) = 1;
803 if (!cfun->calls_alloca)
804 set_mem_alias_set (mem, get_frame_alias_set ());
805 return mem;
808 /* We want to create (subreg:OMODE (obj:IMODE) OFFSET). Return true if
809 this construct would be valid, and false otherwise. */
811 bool
812 validate_subreg (machine_mode omode, machine_mode imode,
813 const_rtx reg, unsigned int offset)
815 unsigned int isize = GET_MODE_SIZE (imode);
816 unsigned int osize = GET_MODE_SIZE (omode);
818 /* All subregs must be aligned. */
819 if (offset % osize != 0)
820 return false;
822 /* The subreg offset cannot be outside the inner object. */
823 if (offset >= isize)
824 return false;
826 /* ??? This should not be here. Temporarily continue to allow word_mode
827 subregs of anything. The most common offender is (subreg:SI (reg:DF)).
828 Generally, backends are doing something sketchy but it'll take time to
829 fix them all. */
830 if (omode == word_mode)
832 /* ??? Similarly, e.g. with (subreg:DF (reg:TI)). Though store_bit_field
833 is the culprit here, and not the backends. */
834 else if (osize >= UNITS_PER_WORD && isize >= osize)
836 /* Allow component subregs of complex and vector. Though given the below
837 extraction rules, it's not always clear what that means. */
838 else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
839 && GET_MODE_INNER (imode) == omode)
841 /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
842 i.e. (subreg:V4SF (reg:SF) 0). This surely isn't the cleanest way to
843 represent this. It's questionable if this ought to be represented at
844 all -- why can't this all be hidden in post-reload splitters that make
845 arbitrary mode changes to the registers themselves. */
846 else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
848 /* Subregs involving floating point modes are not allowed to
849 change size. Therefore (subreg:DI (reg:DF) 0) is fine, but
850 (subreg:SI (reg:DF) 0) isn't. */
851 else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
853 if (! (isize == osize
854 /* LRA can use subreg to store a floating point value in
855 an integer mode. Although the floating point and the
856 integer modes need the same number of hard registers,
857 the size of the floating point mode can be less than that of
858 the integer mode. LRA also uses subregs when a register
859 needs to be used in a different mode in one insn. */
860 || lra_in_progress))
861 return false;
864 /* Paradoxical subregs must have offset zero. */
865 if (osize > isize)
866 return offset == 0;
868 /* This is a normal subreg. Verify that the offset is representable. */
870 /* For hard registers, we already have most of these rules collected in
871 subreg_offset_representable_p. */
872 if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
874 unsigned int regno = REGNO (reg);
876 #ifdef CANNOT_CHANGE_MODE_CLASS
877 if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
878 && GET_MODE_INNER (imode) == omode)
880 else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
881 return false;
882 #endif
884 return subreg_offset_representable_p (regno, imode, offset, omode);
887 /* For pseudo registers, we want most of the same checks. Namely:
888 If the register is no larger than a word, the subreg must be the lowpart.
889 If the register is larger than a word, the subreg must be the lowpart
890 of a subword. A subreg does *not* perform arbitrary bit extraction.
891 Given that we've already checked mode/offset alignment, we only have
892 to check subword subregs here. */
893 if (osize < UNITS_PER_WORD
894 && ! (lra_in_progress && (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))))
896 machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
897 unsigned int low_off = subreg_lowpart_offset (omode, wmode);
898 if (offset % UNITS_PER_WORD != low_off)
899 return false;
901 return true;
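/* Editor's note -- illustrative calls, not part of the original source,
   assuming the function is reached outside of LRA:

     validate_subreg (SImode, DImode, NULL_RTX, 2);
     (false: the offset is not a multiple of the outer mode's size.)
     validate_subreg (SImode, DImode, NULL_RTX, 8);
     (false: the offset lies outside the inner DImode object.)
     validate_subreg (SImode, DImode, NULL_RTX, 0);
     (true on a little-endian target with 64-bit word_mode, since offset 0
      is the lowpart there; on such a big-endian target the lowpart sits at
      offset 4 instead, so offset 0 is rejected.)  */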
905 gen_rtx_SUBREG (machine_mode mode, rtx reg, int offset)
907 gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
908 return gen_rtx_raw_SUBREG (mode, reg, offset);
911 /* Generate a SUBREG representing the least-significant part of REG if MODE
912 is smaller than the mode of REG, otherwise a paradoxical SUBREG. */
915 gen_lowpart_SUBREG (machine_mode mode, rtx reg)
917 machine_mode inmode;
919 inmode = GET_MODE (reg);
920 if (inmode == VOIDmode)
921 inmode = mode;
922 return gen_rtx_SUBREG (mode, reg,
923 subreg_lowpart_offset (mode, inmode));
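/* Editor's note -- an illustrative sketch, not part of the original source:

     rtx d = gen_reg_rtx (DImode);
     rtx s = gen_lowpart_SUBREG (SImode, d);

   On a little-endian target this yields (subreg:SI (reg:DI N) 0); on a
   big-endian target the lowpart lives at byte offset 4, so the SUBREG_BYTE
   is 4 instead.  */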
927 gen_rtx_VAR_LOCATION (machine_mode mode, tree decl, rtx loc,
928 enum var_init_status status)
930 rtx x = gen_rtx_fmt_te (VAR_LOCATION, mode, decl, loc);
931 PAT_VAR_LOCATION_STATUS (x) = status;
932 return x;
936 /* Create an rtvec and store within it the RTXen passed as arguments. */
938 rtvec
939 gen_rtvec (int n, ...)
941 int i;
942 rtvec rt_val;
943 va_list p;
945 va_start (p, n);
947 /* Don't allocate an empty rtvec... */
948 if (n == 0)
950 va_end (p);
951 return NULL_RTVEC;
954 rt_val = rtvec_alloc (n);
956 for (i = 0; i < n; i++)
957 rt_val->elem[i] = va_arg (p, rtx);
959 va_end (p);
960 return rt_val;
963 rtvec
964 gen_rtvec_v (int n, rtx *argp)
966 int i;
967 rtvec rt_val;
969 /* Don't allocate an empty rtvec... */
970 if (n == 0)
971 return NULL_RTVEC;
973 rt_val = rtvec_alloc (n);
975 for (i = 0; i < n; i++)
976 rt_val->elem[i] = *argp++;
978 return rt_val;
981 rtvec
982 gen_rtvec_v (int n, rtx_insn **argp)
984 int i;
985 rtvec rt_val;
987 /* Don't allocate an empty rtvec... */
988 if (n == 0)
989 return NULL_RTVEC;
991 rt_val = rtvec_alloc (n);
993 for (i = 0; i < n; i++)
994 rt_val->elem[i] = *argp++;
996 return rt_val;
1000 /* Return the number of bytes between the start of an OUTER_MODE
1001 in-memory value and the start of an INNER_MODE in-memory value,
1002 given that the former is a lowpart of the latter. It may be a
1003 paradoxical lowpart, in which case the offset will be negative
1004 on big-endian targets. */
1007 byte_lowpart_offset (machine_mode outer_mode,
1008 machine_mode inner_mode)
1010 if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode))
1011 return subreg_lowpart_offset (outer_mode, inner_mode);
1012 else
1013 return -subreg_lowpart_offset (inner_mode, outer_mode);
1016 /* Generate a REG rtx for a new pseudo register of mode MODE.
1017 This pseudo is assigned the next sequential register number. */
1020 gen_reg_rtx (machine_mode mode)
1022 rtx val;
1023 unsigned int align = GET_MODE_ALIGNMENT (mode);
1025 gcc_assert (can_create_pseudo_p ());
1027 /* If a virtual register with a larger mode alignment is generated,
1028 increase the stack alignment estimate because it might be spilled
1029 to the stack later. */
1030 if (SUPPORTS_STACK_ALIGNMENT
1031 && crtl->stack_alignment_estimated < align
1032 && !crtl->stack_realign_processed)
1034 unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
1035 if (crtl->stack_alignment_estimated < min_align)
1036 crtl->stack_alignment_estimated = min_align;
1039 if (generating_concat_p
1040 && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
1041 || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
1043 /* For complex modes, don't make a single pseudo.
1044 Instead, make a CONCAT of two pseudos.
1045 This allows noncontiguous allocation of the real and imaginary parts,
1046 which makes much better code. Besides, allocating DCmode
1047 pseudos overstrains reload on some machines like the 386. */
1048 rtx realpart, imagpart;
1049 machine_mode partmode = GET_MODE_INNER (mode);
1051 realpart = gen_reg_rtx (partmode);
1052 imagpart = gen_reg_rtx (partmode);
1053 return gen_rtx_CONCAT (mode, realpart, imagpart);
1056 /* Do not call gen_reg_rtx with uninitialized crtl. */
1057 gcc_assert (crtl->emit.regno_pointer_align_length);
1059 /* Make sure regno_pointer_align and regno_reg_rtx are large
1060 enough to have an element for this pseudo reg number. */
1062 if (reg_rtx_no == crtl->emit.regno_pointer_align_length)
1064 int old_size = crtl->emit.regno_pointer_align_length;
1065 char *tmp;
1066 rtx *new1;
1068 tmp = XRESIZEVEC (char, crtl->emit.regno_pointer_align, old_size * 2);
1069 memset (tmp + old_size, 0, old_size);
1070 crtl->emit.regno_pointer_align = (unsigned char *) tmp;
1072 new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, old_size * 2);
1073 memset (new1 + old_size, 0, old_size * sizeof (rtx));
1074 regno_reg_rtx = new1;
1076 crtl->emit.regno_pointer_align_length = old_size * 2;
1079 val = gen_raw_REG (mode, reg_rtx_no);
1080 regno_reg_rtx[reg_rtx_no++] = val;
1081 return val;
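/* Editor's note -- an illustrative sketch, not part of the original source:

     rtx r = gen_reg_rtx (SImode);
     (a fresh pseudo (reg:SI N), recorded in regno_reg_rtx[N].)
     rtx c = gen_reg_rtx (DCmode);
     (while generating_concat_p is set, and assuming the target has DCmode,
      this builds (concat:DC (reg:DF N+1) (reg:DF N+2)) so the real and
      imaginary parts can be allocated independently.)  */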
1084 /* Return TRUE if REG is a PARM_DECL, FALSE otherwise. */
1086 bool
1087 reg_is_parm_p (rtx reg)
1089 tree decl;
1091 gcc_assert (REG_P (reg));
1092 decl = REG_EXPR (reg);
1093 return (decl && TREE_CODE (decl) == PARM_DECL);
1096 /* Update NEW with the same attributes as REG, but with OFFSET added
1097 to the REG_OFFSET. */
1099 static void
1100 update_reg_offset (rtx new_rtx, rtx reg, int offset)
1102 REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
1103 REG_OFFSET (reg) + offset);
1106 /* Generate a register with the same attributes as REG, but with OFFSET
1107 added to the REG_OFFSET. */
1110 gen_rtx_REG_offset (rtx reg, machine_mode mode, unsigned int regno,
1111 int offset)
1113 rtx new_rtx = gen_rtx_REG (mode, regno);
1115 update_reg_offset (new_rtx, reg, offset);
1116 return new_rtx;
1119 /* Generate a new pseudo-register with the same attributes as REG, but
1120 with OFFSET added to the REG_OFFSET. */
1123 gen_reg_rtx_offset (rtx reg, machine_mode mode, int offset)
1125 rtx new_rtx = gen_reg_rtx (mode);
1127 update_reg_offset (new_rtx, reg, offset);
1128 return new_rtx;
1131 /* Adjust REG in-place so that it has mode MODE. It is assumed that the
1132 new register is a (possibly paradoxical) lowpart of the old one. */
1134 void
1135 adjust_reg_mode (rtx reg, machine_mode mode)
1137 update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
1138 PUT_MODE (reg, mode);
1141 /* Copy REG's attributes from X, if X has any attributes. If REG and X
1142 have different modes, REG is a (possibly paradoxical) lowpart of X. */
1144 void
1145 set_reg_attrs_from_value (rtx reg, rtx x)
1147 int offset;
1148 bool can_be_reg_pointer = true;
1150 /* Don't call mark_reg_pointer for incompatible pointer sign
1151 extension. */
1152 while (GET_CODE (x) == SIGN_EXTEND
1153 || GET_CODE (x) == ZERO_EXTEND
1154 || GET_CODE (x) == TRUNCATE
1155 || (GET_CODE (x) == SUBREG && subreg_lowpart_p (x)))
1157 #if defined(POINTERS_EXTEND_UNSIGNED)
1158 if (((GET_CODE (x) == SIGN_EXTEND && POINTERS_EXTEND_UNSIGNED)
1159 || (GET_CODE (x) != SIGN_EXTEND && ! POINTERS_EXTEND_UNSIGNED))
1160 && !targetm.have_ptr_extend ())
1161 can_be_reg_pointer = false;
1162 #endif
1163 x = XEXP (x, 0);
1166 /* Hard registers can be reused for multiple purposes within the same
1167 function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
1168 on them is wrong. */
1169 if (HARD_REGISTER_P (reg))
1170 return;
1172 offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
1173 if (MEM_P (x))
1175 if (MEM_OFFSET_KNOWN_P (x))
1176 REG_ATTRS (reg) = get_reg_attrs (MEM_EXPR (x),
1177 MEM_OFFSET (x) + offset);
1178 if (can_be_reg_pointer && MEM_POINTER (x))
1179 mark_reg_pointer (reg, 0);
1181 else if (REG_P (x))
1183 if (REG_ATTRS (x))
1184 update_reg_offset (reg, x, offset);
1185 if (can_be_reg_pointer && REG_POINTER (x))
1186 mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
1190 /* Generate a REG rtx for a new pseudo register, copying the mode
1191 and attributes from X. */
1194 gen_reg_rtx_and_attrs (rtx x)
1196 rtx reg = gen_reg_rtx (GET_MODE (x));
1197 set_reg_attrs_from_value (reg, x);
1198 return reg;
1201 /* Set the register attributes for registers contained in PARM_RTX.
1202 Use needed values from memory attributes of MEM. */
1204 void
1205 set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
1207 if (REG_P (parm_rtx))
1208 set_reg_attrs_from_value (parm_rtx, mem);
1209 else if (GET_CODE (parm_rtx) == PARALLEL)
1211 /* Check for a NULL entry in the first slot, used to indicate that the
1212 parameter goes both on the stack and in registers. */
1213 int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
1214 for (; i < XVECLEN (parm_rtx, 0); i++)
1216 rtx x = XVECEXP (parm_rtx, 0, i);
1217 if (REG_P (XEXP (x, 0)))
1218 REG_ATTRS (XEXP (x, 0))
1219 = get_reg_attrs (MEM_EXPR (mem),
1220 INTVAL (XEXP (x, 1)));
1225 /* Set the REG_ATTRS for registers in value X, given that X represents
1226 decl T. */
1228 void
1229 set_reg_attrs_for_decl_rtl (tree t, rtx x)
1231 if (!t)
1232 return;
1233 tree tdecl = t;
1234 if (GET_CODE (x) == SUBREG)
1236 gcc_assert (subreg_lowpart_p (x));
1237 x = SUBREG_REG (x);
1239 if (REG_P (x))
1240 REG_ATTRS (x)
1241 = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
1242 DECL_P (tdecl)
1243 ? DECL_MODE (tdecl)
1244 : TYPE_MODE (TREE_TYPE (tdecl))));
1245 if (GET_CODE (x) == CONCAT)
1247 if (REG_P (XEXP (x, 0)))
1248 REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
1249 if (REG_P (XEXP (x, 1)))
1250 REG_ATTRS (XEXP (x, 1))
1251 = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
1253 if (GET_CODE (x) == PARALLEL)
1255 int i, start;
1257 /* Check for a NULL entry, used to indicate that the parameter goes
1258 both on the stack and in registers. */
1259 if (XEXP (XVECEXP (x, 0, 0), 0))
1260 start = 0;
1261 else
1262 start = 1;
1264 for (i = start; i < XVECLEN (x, 0); i++)
1266 rtx y = XVECEXP (x, 0, i);
1267 if (REG_P (XEXP (y, 0)))
1268 REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
1273 /* Assign the RTX X to declaration T. */
1275 void
1276 set_decl_rtl (tree t, rtx x)
1278 DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
1279 if (x)
1280 set_reg_attrs_for_decl_rtl (t, x);
1283 /* Assign the RTX X to parameter declaration T. BY_REFERENCE_P is true
1284 if the ABI requires the parameter to be passed by reference. */
1286 void
1287 set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
1289 DECL_INCOMING_RTL (t) = x;
1290 if (x && !by_reference_p)
1291 set_reg_attrs_for_decl_rtl (t, x);
1294 /* Identify REG (which may be a CONCAT) as a user register. */
1296 void
1297 mark_user_reg (rtx reg)
1299 if (GET_CODE (reg) == CONCAT)
1301 REG_USERVAR_P (XEXP (reg, 0)) = 1;
1302 REG_USERVAR_P (XEXP (reg, 1)) = 1;
1304 else
1306 gcc_assert (REG_P (reg));
1307 REG_USERVAR_P (reg) = 1;
1311 /* Identify REG as a probable pointer register and show its alignment
1312 as ALIGN, if nonzero. */
1314 void
1315 mark_reg_pointer (rtx reg, int align)
1317 if (! REG_POINTER (reg))
1319 REG_POINTER (reg) = 1;
1321 if (align)
1322 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1324 else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
1325 /* We can no longer be sure just how aligned this pointer is. */
1326 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1329 /* Return 1 plus largest pseudo reg number used in the current function. */
1332 max_reg_num (void)
1334 return reg_rtx_no;
1337 /* Return 1 + the largest label number used so far in the current function. */
1340 max_label_num (void)
1342 return label_num;
1345 /* Return first label number used in this function (if any were used). */
1348 get_first_label_num (void)
1350 return first_label_num;
1353 /* If the rtx for label was created during the expansion of a nested
1354 function, then first_label_num won't include this label number.
1355 Fix this now so that array indices work later. */
1357 void
1358 maybe_set_first_label_num (rtx_code_label *x)
1360 if (CODE_LABEL_NUMBER (x) < first_label_num)
1361 first_label_num = CODE_LABEL_NUMBER (x);
1364 /* Return a value representing some low-order bits of X, where the number
1365 of low-order bits is given by MODE. Note that no conversion is done
1366 between floating-point and fixed-point values, rather, the bit
1367 representation is returned.
1369 This function handles the cases in common between gen_lowpart, below,
1370 and two variants in cse.c and combine.c. These are the cases that can
1371 be safely handled at all points in the compilation.
1373 If this is not a case we can handle, return 0. */
1376 gen_lowpart_common (machine_mode mode, rtx x)
1378 int msize = GET_MODE_SIZE (mode);
1379 int xsize;
1380 machine_mode innermode;
1382 /* Unfortunately, this routine doesn't take a parameter for the mode of X,
1383 so we have to make one up. Yuk. */
1384 innermode = GET_MODE (x);
1385 if (CONST_INT_P (x)
1386 && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
1387 innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
1388 else if (innermode == VOIDmode)
1389 innermode = mode_for_size (HOST_BITS_PER_DOUBLE_INT, MODE_INT, 0);
1391 xsize = GET_MODE_SIZE (innermode);
1393 gcc_assert (innermode != VOIDmode && innermode != BLKmode);
1395 if (innermode == mode)
1396 return x;
1398 /* MODE must occupy no more words than the mode of X. */
1399 if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
1400 > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
1401 return 0;
1403 /* Don't allow generating paradoxical FLOAT_MODE subregs. */
1404 if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
1405 return 0;
1407 if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
1408 && (GET_MODE_CLASS (mode) == MODE_INT
1409 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
1411 /* If we are getting the low-order part of something that has been
1412 sign- or zero-extended, we can either just use the object being
1413 extended or make a narrower extension. If we want an even smaller
1414 piece than the size of the object being extended, call ourselves
1415 recursively.
1417 This case is used mostly by combine and cse. */
1419 if (GET_MODE (XEXP (x, 0)) == mode)
1420 return XEXP (x, 0);
1421 else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
1422 return gen_lowpart_common (mode, XEXP (x, 0));
1423 else if (msize < xsize)
1424 return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
1426 else if (GET_CODE (x) == SUBREG || REG_P (x)
1427 || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
1428 || CONST_DOUBLE_AS_FLOAT_P (x) || CONST_SCALAR_INT_P (x))
1429 return lowpart_subreg (mode, x, innermode);
1431 /* Otherwise, we can't do this. */
1432 return 0;
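/* Editor's note -- an illustrative sketch, not part of the original source:
   the extension case above lets a lowpart collapse back to the original
   operand:

     given x = (sign_extend:DI (reg:SI 100)),
     gen_lowpart_common (SImode, x) returns (reg:SI 100) directly,

   because the requested mode matches the mode of the operand being
   extended.  */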
1436 gen_highpart (machine_mode mode, rtx x)
1438 unsigned int msize = GET_MODE_SIZE (mode);
1439 rtx result;
1441 /* This case loses if X is a subreg. To catch bugs early,
1442 complain if an invalid MODE is used even in other cases. */
1443 gcc_assert (msize <= UNITS_PER_WORD
1444 || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));
1446 result = simplify_gen_subreg (mode, x, GET_MODE (x),
1447 subreg_highpart_offset (mode, GET_MODE (x)));
1448 gcc_assert (result);
1450 /* simplify_gen_subreg is not guaranteed to return a valid operand for
1451 the target if we have a MEM. gen_highpart must return a valid operand,
1452 emitting code if necessary to do so. */
1453 if (MEM_P (result))
1455 result = validize_mem (result);
1456 gcc_assert (result);
1459 return result;
1462 /* Like gen_highpart, but accept the mode of the EXP operand in case EXP
1463 can be a VOIDmode constant. */
1465 gen_highpart_mode (machine_mode outermode, machine_mode innermode, rtx exp)
1467 if (GET_MODE (exp) != VOIDmode)
1469 gcc_assert (GET_MODE (exp) == innermode);
1470 return gen_highpart (outermode, exp);
1472 return simplify_gen_subreg (outermode, exp, innermode,
1473 subreg_highpart_offset (outermode, innermode));
1476 /* Return the SUBREG_BYTE for an OUTERMODE lowpart of an INNERMODE value. */
1478 unsigned int
1479 subreg_lowpart_offset (machine_mode outermode, machine_mode innermode)
1481 unsigned int offset = 0;
1482 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1484 if (difference > 0)
1486 if (WORDS_BIG_ENDIAN)
1487 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1488 if (BYTES_BIG_ENDIAN)
1489 offset += difference % UNITS_PER_WORD;
1492 return offset;
1495 /* Return offset in bytes to get OUTERMODE high part
1496 of the value in mode INNERMODE stored in memory in target format. */
1497 unsigned int
1498 subreg_highpart_offset (machine_mode outermode, machine_mode innermode)
1500 unsigned int offset = 0;
1501 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1503 gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));
1505 if (difference > 0)
1507 if (! WORDS_BIG_ENDIAN)
1508 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1509 if (! BYTES_BIG_ENDIAN)
1510 offset += difference % UNITS_PER_WORD;
1513 return offset;
1516 /* Return 1 iff X, assumed to be a SUBREG,
1517 refers to the least significant part of its containing reg.
1518 If X is not a SUBREG, always return 1 (it is its own low part!). */
1521 subreg_lowpart_p (const_rtx x)
1523 if (GET_CODE (x) != SUBREG)
1524 return 1;
1525 else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
1526 return 0;
1528 return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
1529 == SUBREG_BYTE (x));
1532 /* Return true if X is a paradoxical subreg, false otherwise. */
1533 bool
1534 paradoxical_subreg_p (const_rtx x)
1536 if (GET_CODE (x) != SUBREG)
1537 return false;
1538 return (GET_MODE_PRECISION (GET_MODE (x))
1539 > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (x))));
1542 /* Return subword OFFSET of operand OP.
1543 The word number, OFFSET, is interpreted as the word number starting
1544 at the low-order address. OFFSET 0 is the low-order word if not
1545 WORDS_BIG_ENDIAN, otherwise it is the high-order word.
1547 If we cannot extract the required word, we return zero. Otherwise,
1548 an rtx corresponding to the requested word will be returned.
1550 VALIDATE_ADDRESS is nonzero if the address should be validated. Before
1551 reload has completed, a valid address will always be returned. After
1552 reload, if a valid address cannot be returned, we return zero.
1554 If VALIDATE_ADDRESS is zero, we simply form the required address; validating
1555 it is the responsibility of the caller.
1557 MODE is the mode of OP in case it is a CONST_INT.
1559 ??? This is still rather broken for some cases. The problem for the
1560 moment is that all callers of this thing provide no 'goal mode' to
1561 tell us to work with. This exists because all callers were written
1562 in a word-based SUBREG world.
1563 These days most uses of this function can be replaced by
1564 simplify_subreg. */
1568 operand_subword (rtx op, unsigned int offset, int validate_address, machine_mode mode)
1570 if (mode == VOIDmode)
1571 mode = GET_MODE (op);
1573 gcc_assert (mode != VOIDmode);
1575 /* If OP is narrower than a word, fail. */
1576 if (mode != BLKmode
1577 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
1578 return 0;
1580 /* If we want a word outside OP, return zero. */
1581 if (mode != BLKmode
1582 && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
1583 return const0_rtx;
1585 /* Form a new MEM at the requested address. */
1586 if (MEM_P (op))
1588 rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);
1590 if (! validate_address)
1591 return new_rtx;
1593 else if (reload_completed)
1595 if (! strict_memory_address_addr_space_p (word_mode,
1596 XEXP (new_rtx, 0),
1597 MEM_ADDR_SPACE (op)))
1598 return 0;
1600 else
1601 return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
1604 /* Rest can be handled by simplify_subreg. */
1605 return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
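/* Editor's note -- an illustrative sketch, not part of the original source,
   assuming a 32-bit word target (UNITS_PER_WORD == 4):

     rtx d  = gen_reg_rtx (DImode);
     rtx w0 = operand_subword (d, 0, 1, DImode);
     rtx w1 = operand_subword (d, 1, 1, DImode);

   w0 and w1 are the two word_mode halves of the DImode pseudo (which half is
   low-order depends on WORDS_BIG_ENDIAN), and operand_subword (d, 2, 1,
   DImode) would return const0_rtx because word 2 lies outside the value.  */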
1608 /* Similar to `operand_subword', but never return 0. If we can't
1609 extract the required subword, put OP into a register and try again.
1610 The second attempt must succeed. We always validate the address in
1611 this case.
1613 MODE is the mode of OP, in case it is CONST_INT. */
1616 operand_subword_force (rtx op, unsigned int offset, machine_mode mode)
1618 rtx result = operand_subword (op, offset, 1, mode);
1620 if (result)
1621 return result;
1623 if (mode != BLKmode && mode != VOIDmode)
1625 /* If this is a register which cannot be accessed by words, copy it
1626 to a pseudo register. */
1627 if (REG_P (op))
1628 op = copy_to_reg (op);
1629 else
1630 op = force_reg (mode, op);
1633 result = operand_subword (op, offset, 1, mode);
1634 gcc_assert (result);
1636 return result;
1639 /* Returns 1 if both MEM_EXPRs can be considered equal,
1640 and 0 otherwise. */
1643 mem_expr_equal_p (const_tree expr1, const_tree expr2)
1645 if (expr1 == expr2)
1646 return 1;
1648 if (! expr1 || ! expr2)
1649 return 0;
1651 if (TREE_CODE (expr1) != TREE_CODE (expr2))
1652 return 0;
1654 return operand_equal_p (expr1, expr2, 0);
1657 /* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
1658 bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
1659 -1 if not known. */
1662 get_mem_align_offset (rtx mem, unsigned int align)
1664 tree expr;
1665 unsigned HOST_WIDE_INT offset;
1667 /* This function can't use
1668 if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem)
1669 || (MAX (MEM_ALIGN (mem),
1670 MAX (align, get_object_alignment (MEM_EXPR (mem))))
1671 < align))
1672 return -1;
1673 else
1674 return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1);
1675 for two reasons:
1676 - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
1677 for <variable>. get_inner_reference doesn't handle it and
1678 even if it did, the alignment in that case needs to be determined
1679 from DECL_FIELD_CONTEXT's TYPE_ALIGN.
1680 - it would do suboptimal job for COMPONENT_REFs, even if MEM_EXPR
1681 isn't sufficiently aligned, the object it is in might be. */
1682 gcc_assert (MEM_P (mem));
1683 expr = MEM_EXPR (mem);
1684 if (expr == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
1685 return -1;
1687 offset = MEM_OFFSET (mem);
1688 if (DECL_P (expr))
1690 if (DECL_ALIGN (expr) < align)
1691 return -1;
1693 else if (INDIRECT_REF_P (expr))
1695 if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
1696 return -1;
1698 else if (TREE_CODE (expr) == COMPONENT_REF)
1700 while (1)
1702 tree inner = TREE_OPERAND (expr, 0);
1703 tree field = TREE_OPERAND (expr, 1);
1704 tree byte_offset = component_ref_field_offset (expr);
1705 tree bit_offset = DECL_FIELD_BIT_OFFSET (field);
1707 if (!byte_offset
1708 || !tree_fits_uhwi_p (byte_offset)
1709 || !tree_fits_uhwi_p (bit_offset))
1710 return -1;
1712 offset += tree_to_uhwi (byte_offset);
1713 offset += tree_to_uhwi (bit_offset) / BITS_PER_UNIT;
1715 if (inner == NULL_TREE)
1717 if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
1718 < (unsigned int) align)
1719 return -1;
1720 break;
1722 else if (DECL_P (inner))
1724 if (DECL_ALIGN (inner) < align)
1725 return -1;
1726 break;
1728 else if (TREE_CODE (inner) != COMPONENT_REF)
1729 return -1;
1730 expr = inner;
1733 else
1734 return -1;
1736 return offset & ((align / BITS_PER_UNIT) - 1);
1739 /* Given REF (a MEM) and T, either the type of REF or the expression
1740 corresponding to REF, set the memory attributes. OBJECTP is nonzero
1741 if we are making a new object of this type. BITPOS is nonzero if
1742 there is an offset outstanding on T that will be applied later. */
1744 void
1745 set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
1746 HOST_WIDE_INT bitpos)
1748 HOST_WIDE_INT apply_bitpos = 0;
1749 tree type;
1750 struct mem_attrs attrs, *defattrs, *refattrs;
1751 addr_space_t as;
1753 /* It can happen that type_for_mode was given a mode for which there
1754 is no language-level type; in that case it returns NULL, which
1755 we can see here. */
1756 if (t == NULL_TREE)
1757 return;
1759 type = TYPE_P (t) ? t : TREE_TYPE (t);
1760 if (type == error_mark_node)
1761 return;
1763 /* If we have already set DECL_RTL = ref, get_alias_set will get the
1764 wrong answer, as it assumes that DECL_RTL already has the right alias
1765 info. Callers should not set DECL_RTL until after the call to
1766 set_mem_attributes. */
1767 gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));
1769 memset (&attrs, 0, sizeof (attrs));
1771 /* Get the alias set from the expression or type (perhaps using a
1772 front-end routine) and use it. */
1773 attrs.alias = get_alias_set (t);
1775 MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
1776 MEM_POINTER (ref) = POINTER_TYPE_P (type);
1778 /* Default values from pre-existing memory attributes if present. */
1779 refattrs = MEM_ATTRS (ref);
1780 if (refattrs)
1782 /* ??? Can this ever happen? Calling this routine on a MEM that
1783 already carries memory attributes should probably be invalid. */
1784 attrs.expr = refattrs->expr;
1785 attrs.offset_known_p = refattrs->offset_known_p;
1786 attrs.offset = refattrs->offset;
1787 attrs.size_known_p = refattrs->size_known_p;
1788 attrs.size = refattrs->size;
1789 attrs.align = refattrs->align;
1792 /* Otherwise, default values from the mode of the MEM reference. */
1793 else
1795 defattrs = mode_mem_attrs[(int) GET_MODE (ref)];
1796 gcc_assert (!defattrs->expr);
1797 gcc_assert (!defattrs->offset_known_p);
1799 /* Respect mode size. */
1800 attrs.size_known_p = defattrs->size_known_p;
1801 attrs.size = defattrs->size;
1802 /* ??? Is this really necessary? We probably should always get
1803 the size from the type below. */
1805 /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
1806 if T is an object, always compute the object alignment below. */
1807 if (TYPE_P (t))
1808 attrs.align = defattrs->align;
1809 else
1810 attrs.align = BITS_PER_UNIT;
1811 /* ??? If T is a type, respecting mode alignment may *also* be wrong
1812 e.g. if the type carries an alignment attribute. Should we be
1813 able to simply always use TYPE_ALIGN? */
1816 /* We can set the alignment from the type if we are making an object,
1817 this is an INDIRECT_REF, or if TYPE_ALIGN_OK. */
1818 if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
1819 attrs.align = MAX (attrs.align, TYPE_ALIGN (type));
1821 /* If the size is known, we can set that. */
1822 tree new_size = TYPE_SIZE_UNIT (type);
1824 /* The address-space is that of the type. */
1825 as = TYPE_ADDR_SPACE (type);
1827 /* If T is not a type, we may be able to deduce some more information about
1828 the expression. */
1829 if (! TYPE_P (t))
1831 tree base;
1833 if (TREE_THIS_VOLATILE (t))
1834 MEM_VOLATILE_P (ref) = 1;
1836 /* Now remove any conversions: they don't change what the underlying
1837 object is. Likewise for SAVE_EXPR. */
1838 while (CONVERT_EXPR_P (t)
1839 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1840 || TREE_CODE (t) == SAVE_EXPR)
1841 t = TREE_OPERAND (t, 0);
1843 /* Note whether this expression can trap. */
1844 MEM_NOTRAP_P (ref) = !tree_could_trap_p (t);
1846 base = get_base_address (t);
1847 if (base)
1849 if (DECL_P (base)
1850 && TREE_READONLY (base)
1851 && (TREE_STATIC (base) || DECL_EXTERNAL (base))
1852 && !TREE_THIS_VOLATILE (base))
1853 MEM_READONLY_P (ref) = 1;
1855 /* Mark static const strings readonly as well. */
1856 if (TREE_CODE (base) == STRING_CST
1857 && TREE_READONLY (base)
1858 && TREE_STATIC (base))
1859 MEM_READONLY_P (ref) = 1;
1861 /* Address-space information is on the base object. */
1862 if (TREE_CODE (base) == MEM_REF
1863 || TREE_CODE (base) == TARGET_MEM_REF)
1864 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (base,
1865 0))));
1866 else
1867 as = TYPE_ADDR_SPACE (TREE_TYPE (base));
1870 /* If this expression uses its parent's alias set, mark it such
1871 that we won't change it. */
1872 if (component_uses_parent_alias_set_from (t) != NULL_TREE)
1873 MEM_KEEP_ALIAS_SET_P (ref) = 1;
1875 /* If this is a decl, set the attributes of the MEM from it. */
1876 if (DECL_P (t))
1878 attrs.expr = t;
1879 attrs.offset_known_p = true;
1880 attrs.offset = 0;
1881 apply_bitpos = bitpos;
1882 new_size = DECL_SIZE_UNIT (t);
1885 /* ??? If we end up with a constant here do record a MEM_EXPR. */
1886 else if (CONSTANT_CLASS_P (t))
1889 /* If this is a field reference, record it. */
1890 else if (TREE_CODE (t) == COMPONENT_REF)
1892 attrs.expr = t;
1893 attrs.offset_known_p = true;
1894 attrs.offset = 0;
1895 apply_bitpos = bitpos;
1896 if (DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
1897 new_size = DECL_SIZE_UNIT (TREE_OPERAND (t, 1));
1900 /* If this is an array reference, look for an outer field reference. */
1901 else if (TREE_CODE (t) == ARRAY_REF)
1903 tree off_tree = size_zero_node;
1904 /* We can't modify t, because we use it at the end of the
1905 function. */
1906 tree t2 = t;
1910 tree index = TREE_OPERAND (t2, 1);
1911 tree low_bound = array_ref_low_bound (t2);
1912 tree unit_size = array_ref_element_size (t2);
1914 /* We assume all arrays have sizes that are a multiple of a byte.
1915 First subtract the lower bound, if any, in the type of the
1916 index, then convert to sizetype and multiply by the size of
1917 the array element. */
1918 if (! integer_zerop (low_bound))
1919 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
1920 index, low_bound);
1922 off_tree = size_binop (PLUS_EXPR,
1923 size_binop (MULT_EXPR,
1924 fold_convert (sizetype,
1925 index),
1926 unit_size),
1927 off_tree);
1928 t2 = TREE_OPERAND (t2, 0);
1930 while (TREE_CODE (t2) == ARRAY_REF);
1932 if (DECL_P (t2)
1933 || TREE_CODE (t2) == COMPONENT_REF)
1935 attrs.expr = t2;
1936 attrs.offset_known_p = false;
1937 if (tree_fits_uhwi_p (off_tree))
1939 attrs.offset_known_p = true;
1940 attrs.offset = tree_to_uhwi (off_tree);
1941 apply_bitpos = bitpos;
1944 /* Else do not record a MEM_EXPR. */
1947 /* If this is an indirect reference, record it. */
1948 else if (TREE_CODE (t) == MEM_REF
1949 || TREE_CODE (t) == TARGET_MEM_REF)
1951 attrs.expr = t;
1952 attrs.offset_known_p = true;
1953 attrs.offset = 0;
1954 apply_bitpos = bitpos;
1957 /* Compute the alignment. */
1958 unsigned int obj_align;
1959 unsigned HOST_WIDE_INT obj_bitpos;
1960 get_object_alignment_1 (t, &obj_align, &obj_bitpos);
1961 obj_bitpos = (obj_bitpos - bitpos) & (obj_align - 1);
1962 if (obj_bitpos != 0)
1963 obj_align = (obj_bitpos & -obj_bitpos);
1964 attrs.align = MAX (attrs.align, obj_align);
1967 if (tree_fits_uhwi_p (new_size))
1969 attrs.size_known_p = true;
1970 attrs.size = tree_to_uhwi (new_size);
1973 /* If we modified OFFSET based on T, then subtract the outstanding
1974 bit position offset. Similarly, increase the size of the accessed
1975 object to contain the negative offset. */
1976 if (apply_bitpos)
1978 gcc_assert (attrs.offset_known_p);
1979 attrs.offset -= apply_bitpos / BITS_PER_UNIT;
1980 if (attrs.size_known_p)
1981 attrs.size += apply_bitpos / BITS_PER_UNIT;
1984 /* Now set the attributes we computed above. */
1985 attrs.addrspace = as;
1986 set_mem_attrs (ref, &attrs);
1989 void
1990 set_mem_attributes (rtx ref, tree t, int objectp)
1992 set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
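/* Illustrative sketch, not part of the original file: a caller that
   already has a VAR_DECL can build a MEM on the decl's address and let
   set_mem_attributes derive alias set, alignment, size and MEM_EXPR from
   the tree.  The helper name example_mem_for_decl is made up.  */

static rtx
example_mem_for_decl (tree decl)
{
  /* DECL_RTL of a variable is itself a MEM; reuse its address.  */
  rtx addr = XEXP (DECL_RTL (decl), 0);
  rtx mem = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)), addr);
  /* OBJECTP is nonzero because DECL really is the accessed object.  */
  set_mem_attributes (mem, decl, 1);
  return mem;
}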
1995 /* Set the alias set of MEM to SET. */
1997 void
1998 set_mem_alias_set (rtx mem, alias_set_type set)
2000 struct mem_attrs attrs;
2002 /* If the new and old alias sets don't conflict, something is wrong. */
2003 gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
2004 attrs = *get_mem_attrs (mem);
2005 attrs.alias = set;
2006 set_mem_attrs (mem, &attrs);
2009 /* Set the address space of MEM to ADDRSPACE (target-defined). */
2011 void
2012 set_mem_addr_space (rtx mem, addr_space_t addrspace)
2014 struct mem_attrs attrs;
2016 attrs = *get_mem_attrs (mem);
2017 attrs.addrspace = addrspace;
2018 set_mem_attrs (mem, &attrs);
2021 /* Set the alignment of MEM to ALIGN bits. */
2023 void
2024 set_mem_align (rtx mem, unsigned int align)
2026 struct mem_attrs attrs;
2028 attrs = *get_mem_attrs (mem);
2029 attrs.align = align;
2030 set_mem_attrs (mem, &attrs);
2033 /* Set the expr for MEM to EXPR. */
2035 void
2036 set_mem_expr (rtx mem, tree expr)
2038 struct mem_attrs attrs;
2040 attrs = *get_mem_attrs (mem);
2041 attrs.expr = expr;
2042 set_mem_attrs (mem, &attrs);
2045 /* Set the offset of MEM to OFFSET. */
2047 void
2048 set_mem_offset (rtx mem, HOST_WIDE_INT offset)
2050 struct mem_attrs attrs;
2052 attrs = *get_mem_attrs (mem);
2053 attrs.offset_known_p = true;
2054 attrs.offset = offset;
2055 set_mem_attrs (mem, &attrs);
2058 /* Clear the offset of MEM. */
2060 void
2061 clear_mem_offset (rtx mem)
2063 struct mem_attrs attrs;
2065 attrs = *get_mem_attrs (mem);
2066 attrs.offset_known_p = false;
2067 set_mem_attrs (mem, &attrs);
2070 /* Set the size of MEM to SIZE. */
2072 void
2073 set_mem_size (rtx mem, HOST_WIDE_INT size)
2075 struct mem_attrs attrs;
2077 attrs = *get_mem_attrs (mem);
2078 attrs.size_known_p = true;
2079 attrs.size = size;
2080 set_mem_attrs (mem, &attrs);
2083 /* Clear the size of MEM. */
2085 void
2086 clear_mem_size (rtx mem)
2088 struct mem_attrs attrs;
2090 attrs = *get_mem_attrs (mem);
2091 attrs.size_known_p = false;
2092 set_mem_attrs (mem, &attrs);
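/* Illustrative sketch, not part of the original file: the setters above
   are used when a pass can prove more about a MEM than its current
   attributes say.  The concrete values below are invented for the
   example; example_annotate_mem is a made-up name.  */

static void
example_annotate_mem (rtx mem)
{
  set_mem_align (mem, 64);   /* alignment is given in bits */
  set_mem_size (mem, 16);    /* size is given in bytes */
  clear_mem_offset (mem);    /* offset within MEM_EXPR is no longer known */
}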
2095 /* Return a memory reference like MEMREF, but with its mode changed to MODE
2096 and its address changed to ADDR. (VOIDmode means don't change the mode.
2097 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
2098 returned memory location is required to be valid. INPLACE is true if any
2099 changes can be made directly to MEMREF or false if MEMREF must be treated
2100 as immutable.
2102 The memory attributes are not changed. */
2104 static rtx
2105 change_address_1 (rtx memref, machine_mode mode, rtx addr, int validate,
2106 bool inplace)
2108 addr_space_t as;
2109 rtx new_rtx;
2111 gcc_assert (MEM_P (memref));
2112 as = MEM_ADDR_SPACE (memref);
2113 if (mode == VOIDmode)
2114 mode = GET_MODE (memref);
2115 if (addr == 0)
2116 addr = XEXP (memref, 0);
2117 if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
2118 && (!validate || memory_address_addr_space_p (mode, addr, as)))
2119 return memref;
2121 /* Don't validate the address for LRA. LRA can make the address valid
2122 by itself in the most efficient way. */
2123 if (validate && !lra_in_progress)
2125 if (reload_in_progress || reload_completed)
2126 gcc_assert (memory_address_addr_space_p (mode, addr, as));
2127 else
2128 addr = memory_address_addr_space (mode, addr, as);
2131 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
2132 return memref;
2134 if (inplace)
2136 XEXP (memref, 0) = addr;
2137 return memref;
2140 new_rtx = gen_rtx_MEM (mode, addr);
2141 MEM_COPY_ATTRIBUTES (new_rtx, memref);
2142 return new_rtx;
2145 /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
2146 way we are changing MEMREF, so we only preserve the alias set. */
2149 change_address (rtx memref, machine_mode mode, rtx addr)
2151 rtx new_rtx = change_address_1 (memref, mode, addr, 1, false);
2152 machine_mode mmode = GET_MODE (new_rtx);
2153 struct mem_attrs attrs, *defattrs;
2155 attrs = *get_mem_attrs (memref);
2156 defattrs = mode_mem_attrs[(int) mmode];
2157 attrs.expr = NULL_TREE;
2158 attrs.offset_known_p = false;
2159 attrs.size_known_p = defattrs->size_known_p;
2160 attrs.size = defattrs->size;
2161 attrs.align = defattrs->align;
2163 /* If there are no changes, just return the original memory reference. */
2164 if (new_rtx == memref)
2166 if (mem_attrs_eq_p (get_mem_attrs (memref), &attrs))
2167 return new_rtx;
2169 new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
2170 MEM_COPY_ATTRIBUTES (new_rtx, memref);
2173 set_mem_attrs (new_rtx, &attrs);
2174 return new_rtx;
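/* Illustrative sketch, not part of the original file: change_address is
   the call to use when both mode and address change and nothing can be
   said about the new offset or size, so only the alias set survives.
   PTR is assumed to already hold a valid address; example_readdress is
   a made-up name.  */

static rtx
example_readdress (rtx mem, rtx ptr)
{
  return change_address (mem, word_mode, ptr);
}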
2177 /* Return a memory reference like MEMREF, but with its mode changed
2178 to MODE and its address offset by OFFSET bytes. If VALIDATE is
2179 nonzero, the memory address is forced to be valid.
2180 If ADJUST_ADDRESS is zero, OFFSET is only used to update MEM_ATTRS
2181 and the caller is responsible for adjusting MEMREF base register.
2182 If ADJUST_OBJECT is zero, the underlying object associated with the
2183 memory reference is left unchanged and the caller is responsible for
2184 dealing with it. Otherwise, if the new memory reference is outside
2185 the underlying object, even partially, then the object is dropped.
2186 SIZE, if nonzero, is the size of an access in cases where MODE
2187 has no inherent size. */
2190 adjust_address_1 (rtx memref, machine_mode mode, HOST_WIDE_INT offset,
2191 int validate, int adjust_address, int adjust_object,
2192 HOST_WIDE_INT size)
2194 rtx addr = XEXP (memref, 0);
2195 rtx new_rtx;
2196 machine_mode address_mode;
2197 int pbits;
2198 struct mem_attrs attrs = *get_mem_attrs (memref), *defattrs;
2199 unsigned HOST_WIDE_INT max_align;
2200 #ifdef POINTERS_EXTEND_UNSIGNED
2201 machine_mode pointer_mode
2202 = targetm.addr_space.pointer_mode (attrs.addrspace);
2203 #endif
2205 /* VOIDmode means no mode change for change_address_1. */
2206 if (mode == VOIDmode)
2207 mode = GET_MODE (memref);
2209 /* Take the size of non-BLKmode accesses from the mode. */
2210 defattrs = mode_mem_attrs[(int) mode];
2211 if (defattrs->size_known_p)
2212 size = defattrs->size;
2214 /* If there are no changes, just return the original memory reference. */
2215 if (mode == GET_MODE (memref) && !offset
2216 && (size == 0 || (attrs.size_known_p && attrs.size == size))
2217 && (!validate || memory_address_addr_space_p (mode, addr,
2218 attrs.addrspace)))
2219 return memref;
2221 /* ??? Prefer to create garbage instead of creating shared rtl.
2222 This may happen even if offset is nonzero -- consider
2223 (plus (plus reg reg) const_int) -- so do this always. */
2224 addr = copy_rtx (addr);
2226 /* Convert a possibly large offset to a signed value within the
2227 range of the target address space. */
2228 address_mode = get_address_mode (memref);
2229 pbits = GET_MODE_BITSIZE (address_mode);
2230 if (HOST_BITS_PER_WIDE_INT > pbits)
2232 int shift = HOST_BITS_PER_WIDE_INT - pbits;
2233 offset = (((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) offset << shift))
2234 >> shift);
2237 if (adjust_address)
2239 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
2240 object, we can merge it into the LO_SUM. */
2241 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
2242 && offset >= 0
2243 && (unsigned HOST_WIDE_INT) offset
2244 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
2245 addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
2246 plus_constant (address_mode,
2247 XEXP (addr, 1), offset));
2248 #ifdef POINTERS_EXTEND_UNSIGNED
2249 /* If MEMREF is a ZERO_EXTEND from pointer_mode and the offset is valid
2250 in that mode, we merge it into the ZERO_EXTEND. We take advantage of
2251 the fact that pointers are not allowed to overflow. */
2252 else if (POINTERS_EXTEND_UNSIGNED > 0
2253 && GET_CODE (addr) == ZERO_EXTEND
2254 && GET_MODE (XEXP (addr, 0)) == pointer_mode
2255 && trunc_int_for_mode (offset, pointer_mode) == offset)
2256 addr = gen_rtx_ZERO_EXTEND (address_mode,
2257 plus_constant (pointer_mode,
2258 XEXP (addr, 0), offset));
2259 #endif
2260 else
2261 addr = plus_constant (address_mode, addr, offset);
2264 new_rtx = change_address_1 (memref, mode, addr, validate, false);
2266 /* If the address is a REG, change_address_1 rightfully returns memref,
2267 but this would destroy memref's MEM_ATTRS. */
2268 if (new_rtx == memref && offset != 0)
2269 new_rtx = copy_rtx (new_rtx);
2271 /* Conservatively drop the object if we don't know where we start from. */
2272 if (adjust_object && (!attrs.offset_known_p || !attrs.size_known_p))
2274 attrs.expr = NULL_TREE;
2275 attrs.alias = 0;
2278 /* Compute the new values of the memory attributes due to this adjustment.
2279 We add the offsets and update the alignment. */
2280 if (attrs.offset_known_p)
2282 attrs.offset += offset;
2284 /* Drop the object if the new left end is not within its bounds. */
2285 if (adjust_object && attrs.offset < 0)
2287 attrs.expr = NULL_TREE;
2288 attrs.alias = 0;
2292 /* Compute the new alignment by taking the MIN of the alignment and the
2293 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
2294 is zero. */
2295 if (offset != 0)
2297 max_align = (offset & -offset) * BITS_PER_UNIT;
2298 attrs.align = MIN (attrs.align, max_align);
2301 if (size)
2303 /* Drop the object if the new right end is not within its bounds. */
2304 if (adjust_object && (offset + size) > attrs.size)
2306 attrs.expr = NULL_TREE;
2307 attrs.alias = 0;
2309 attrs.size_known_p = true;
2310 attrs.size = size;
2312 else if (attrs.size_known_p)
2314 gcc_assert (!adjust_object);
2315 attrs.size -= offset;
2316 /* ??? The store_by_pieces machinery generates negative sizes,
2317 so don't assert for that here. */
2320 set_mem_attrs (new_rtx, &attrs);
2322 return new_rtx;
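/* Illustrative sketch, not part of the original file: most callers reach
   adjust_address_1 through the adjust_address/adjust_address_nv
   convenience macros, which supply VALIDATE and the adjust flags.
   Splitting a double-word MEM into word-sized halves is the classic
   use; example_split_double_word is a made-up name.  */

static void
example_split_double_word (rtx mem, rtx *lo, rtx *hi)
{
  /* Both halves keep MEM_EXPR; offset, size and alignment are updated.  */
  *lo = adjust_address (mem, word_mode, 0);
  *hi = adjust_address (mem, word_mode, UNITS_PER_WORD);
}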
2325 /* Return a memory reference like MEMREF, but with its mode changed
2326 to MODE and its address changed to ADDR, which is assumed to be
2327 MEMREF offset by OFFSET bytes. If VALIDATE is
2328 nonzero, the memory address is forced to be valid. */
2331 adjust_automodify_address_1 (rtx memref, machine_mode mode, rtx addr,
2332 HOST_WIDE_INT offset, int validate)
2334 memref = change_address_1 (memref, VOIDmode, addr, validate, false);
2335 return adjust_address_1 (memref, mode, offset, validate, 0, 0, 0);
2338 /* Return a memory reference like MEMREF, but whose address is changed by
2339 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2340 known to be in OFFSET (possibly 1). */
2343 offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
2345 rtx new_rtx, addr = XEXP (memref, 0);
2346 machine_mode address_mode;
2347 struct mem_attrs attrs, *defattrs;
2349 attrs = *get_mem_attrs (memref);
2350 address_mode = get_address_mode (memref);
2351 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2353 /* At this point we don't know _why_ the address is invalid. It
2354 could have secondary memory references, multiplies or anything.
2356 However, if we did go and rearrange things, we can wind up not
2357 being able to recognize the magic around pic_offset_table_rtx.
2358 This stuff is fragile, and is yet another example of why it is
2359 bad to expose PIC machinery too early. */
2360 if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx,
2361 attrs.addrspace)
2362 && GET_CODE (addr) == PLUS
2363 && XEXP (addr, 0) == pic_offset_table_rtx)
2365 addr = force_reg (GET_MODE (addr), addr);
2366 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2369 update_temp_slot_address (XEXP (memref, 0), new_rtx);
2370 new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1, false);
2372 /* If there are no changes, just return the original memory reference. */
2373 if (new_rtx == memref)
2374 return new_rtx;
2376 /* Update the alignment to reflect the offset. Reset the offset, which
2377 we don't know. */
2378 defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
2379 attrs.offset_known_p = false;
2380 attrs.size_known_p = defattrs->size_known_p;
2381 attrs.size = defattrs->size;
2382 attrs.align = MIN (attrs.align, pow2 * BITS_PER_UNIT);
2383 set_mem_attrs (new_rtx, &attrs);
2384 return new_rtx;
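/* Illustrative sketch, not part of the original file: offset_address is
   for a byte offset that is only known at run time, where the caller can
   still promise a power-of-two factor.  BYTE_OFF is assumed to be a
   Pmode value that is a multiple of 8; example_index_mem is a made-up
   name.  */

static rtx
example_index_mem (rtx mem, rtx byte_off)
{
  /* POW2 = 8 lets the new MEM keep at least 8-byte alignment info.  */
  return offset_address (mem, byte_off, 8);
}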
2387 /* Return a memory reference like MEMREF, but with its address changed to
2388 ADDR. The caller is asserting that the actual piece of memory pointed
2389 to is the same, just the form of the address is being changed, such as
2390 by putting something into a register. INPLACE is true if any changes
2391 can be made directly to MEMREF or false if MEMREF must be treated as
2392 immutable. */
2395 replace_equiv_address (rtx memref, rtx addr, bool inplace)
2397 /* change_address_1 copies the memory attribute structure without change
2398 and that's exactly what we want here. */
2399 update_temp_slot_address (XEXP (memref, 0), addr);
2400 return change_address_1 (memref, VOIDmode, addr, 1, inplace);
2403 /* Likewise, but the reference is not required to be valid. */
2406 replace_equiv_address_nv (rtx memref, rtx addr, bool inplace)
2408 return change_address_1 (memref, VOIDmode, addr, 0, inplace);
2411 /* Return a memory reference like MEMREF, but with its mode widened to
2412 MODE and offset by OFFSET. This would be used by targets that e.g.
2413 cannot issue QImode memory operations and have to use SImode memory
2414 operations plus masking logic. */
2417 widen_memory_access (rtx memref, machine_mode mode, HOST_WIDE_INT offset)
2419 rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1, 0, 0);
2420 struct mem_attrs attrs;
2421 unsigned int size = GET_MODE_SIZE (mode);
2423 /* If there are no changes, just return the original memory reference. */
2424 if (new_rtx == memref)
2425 return new_rtx;
2427 attrs = *get_mem_attrs (new_rtx);
2429 /* If we don't know what offset we were at within the expression, then
2430 we can't know if we've overstepped the bounds. */
2431 if (! attrs.offset_known_p)
2432 attrs.expr = NULL_TREE;
2434 while (attrs.expr)
2436 if (TREE_CODE (attrs.expr) == COMPONENT_REF)
2438 tree field = TREE_OPERAND (attrs.expr, 1);
2439 tree offset = component_ref_field_offset (attrs.expr);
2441 if (! DECL_SIZE_UNIT (field))
2443 attrs.expr = NULL_TREE;
2444 break;
2447 /* Is the field at least as large as the access? If so, ok,
2448 otherwise strip back to the containing structure. */
2449 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2450 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
2451 && attrs.offset >= 0)
2452 break;
2454 if (! tree_fits_uhwi_p (offset))
2456 attrs.expr = NULL_TREE;
2457 break;
2460 attrs.expr = TREE_OPERAND (attrs.expr, 0);
2461 attrs.offset += tree_to_uhwi (offset);
2462 attrs.offset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
2463 / BITS_PER_UNIT);
2465 /* Similarly for the decl. */
2466 else if (DECL_P (attrs.expr)
2467 && DECL_SIZE_UNIT (attrs.expr)
2468 && TREE_CODE (DECL_SIZE_UNIT (attrs.expr)) == INTEGER_CST
2469 && compare_tree_int (DECL_SIZE_UNIT (attrs.expr), size) >= 0
2470 && (! attrs.offset_known_p || attrs.offset >= 0))
2471 break;
2472 else
2474 /* The widened memory access overflows the expression, which means
2475 that it could alias another expression. Zap it. */
2476 attrs.expr = NULL_TREE;
2477 break;
2481 if (! attrs.expr)
2482 attrs.offset_known_p = false;
2484 /* The widened memory may alias other stuff, so zap the alias set. */
2485 /* ??? Maybe use get_alias_set on any remaining expression. */
2486 attrs.alias = 0;
2487 attrs.size_known_p = true;
2488 attrs.size = size;
2489 set_mem_attrs (new_rtx, &attrs);
2490 return new_rtx;
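/* Illustrative sketch, not part of the original file: a target that can
   only issue word-sized loads widens a byte access and masks the result.
   Only the MEM side is shown and the address is assumed to be already
   word-aligned; example_widen_byte is a made-up name.  */

static rtx
example_widen_byte (rtx byte_mem)
{
  /* Offset 0 keeps the same starting byte; the alias set is zapped and
     the size updated to the widened access.  */
  return widen_memory_access (byte_mem, word_mode, 0);
}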
2493 /* A fake decl that is used as the MEM_EXPR of spill slots. */
2494 static GTY(()) tree spill_slot_decl;
2496 tree
2497 get_spill_slot_decl (bool force_build_p)
2499 tree d = spill_slot_decl;
2500 rtx rd;
2501 struct mem_attrs attrs;
2503 if (d || !force_build_p)
2504 return d;
2506 d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
2507 VAR_DECL, get_identifier ("%sfp"), void_type_node);
2508 DECL_ARTIFICIAL (d) = 1;
2509 DECL_IGNORED_P (d) = 1;
2510 TREE_USED (d) = 1;
2511 spill_slot_decl = d;
2513 rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
2514 MEM_NOTRAP_P (rd) = 1;
2515 attrs = *mode_mem_attrs[(int) BLKmode];
2516 attrs.alias = new_alias_set ();
2517 attrs.expr = d;
2518 set_mem_attrs (rd, &attrs);
2519 SET_DECL_RTL (d, rd);
2521 return d;
2524 /* Given MEM, a result from assign_stack_local, fill in the memory
2525 attributes as appropriate for a register allocator spill slot.
2526 These slots are not aliasable by other memory. We arrange for
2527 them all to use a single MEM_EXPR, so that the aliasing code can
2528 work properly in the case of shared spill slots. */
2530 void
2531 set_mem_attrs_for_spill (rtx mem)
2533 struct mem_attrs attrs;
2534 rtx addr;
2536 attrs = *get_mem_attrs (mem);
2537 attrs.expr = get_spill_slot_decl (true);
2538 attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr));
2539 attrs.addrspace = ADDR_SPACE_GENERIC;
2541 /* We expect the incoming memory to be of the form:
2542 (mem:MODE (plus (reg sfp) (const_int offset)))
2543 with perhaps the plus missing for offset = 0. */
2544 addr = XEXP (mem, 0);
2545 attrs.offset_known_p = true;
2546 attrs.offset = 0;
2547 if (GET_CODE (addr) == PLUS
2548 && CONST_INT_P (XEXP (addr, 1)))
2549 attrs.offset = INTVAL (XEXP (addr, 1));
2551 set_mem_attrs (mem, &attrs);
2552 MEM_NOTRAP_P (mem) = 1;
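/* Illustrative sketch, not part of the original file: a spill slot is
   usually obtained from assign_stack_local and then rewritten with the
   shared spill-slot attributes so all spill slots alias each other and
   nothing else.  example_make_spill_slot is a made-up name.  */

static rtx
example_make_spill_slot (machine_mode mode)
{
  /* ALIGN = 0 means "align according to MODE".  */
  rtx slot = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
  set_mem_attrs_for_spill (slot);
  return slot;
}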
2555 /* Return a newly created CODE_LABEL rtx with a unique label number. */
2557 rtx_code_label *
2558 gen_label_rtx (void)
2560 return as_a <rtx_code_label *> (
2561 gen_rtx_CODE_LABEL (VOIDmode, NULL_RTX, NULL_RTX,
2562 NULL, label_num++, NULL));
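/* Illustrative sketch, not part of the original file: a label from
   gen_label_rtx only enters the insn stream once it is emitted, and its
   LABEL_NUSES is maintained for the jumps that reference it.
   example_emit_new_label is a made-up name.  */

static rtx_code_label *
example_emit_new_label (void)
{
  rtx_code_label *label = gen_label_rtx ();
  emit_label (label);   /* add the CODE_LABEL to the current sequence */
  return label;
}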
2565 /* For procedure integration. */
2567 /* Install new pointers to the first and last insns in the chain.
2568 Also, set cur_insn_uid to one higher than the last in use.
2569 Used for an inline-procedure after copying the insn chain. */
2571 void
2572 set_new_first_and_last_insn (rtx_insn *first, rtx_insn *last)
2574 rtx_insn *insn;
2576 set_first_insn (first);
2577 set_last_insn (last);
2578 cur_insn_uid = 0;
2580 if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS)
2582 int debug_count = 0;
2584 cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1;
2585 cur_debug_insn_uid = 0;
2587 for (insn = first; insn; insn = NEXT_INSN (insn))
2588 if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID)
2589 cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
2590 else
2592 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2593 if (DEBUG_INSN_P (insn))
2594 debug_count++;
2597 if (debug_count)
2598 cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count;
2599 else
2600 cur_debug_insn_uid++;
2602 else
2603 for (insn = first; insn; insn = NEXT_INSN (insn))
2604 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2606 cur_insn_uid++;
2609 /* Go through all the RTL insn bodies and copy any invalid shared
2610 structure. This routine should only be called once. */
2612 static void
2613 unshare_all_rtl_1 (rtx_insn *insn)
2615 /* Unshare just about everything else. */
2616 unshare_all_rtl_in_chain (insn);
2618 /* Make sure the addresses of stack slots found outside the insn chain
2619 (such as, in DECL_RTL of a variable) are not shared
2620 with the insn chain.
2622 This special care is necessary when the stack slot MEM does not
2623 actually appear in the insn chain. If it does appear, its address
2624 is unshared from all else at that point. */
2625 stack_slot_list = safe_as_a <rtx_expr_list *> (
2626 copy_rtx_if_shared (stack_slot_list));
2629 /* Go through all the RTL insn bodies and copy any invalid shared
2630 structure, again. This is a fairly expensive thing to do so it
2631 should be done sparingly. */
2633 void
2634 unshare_all_rtl_again (rtx_insn *insn)
2636 rtx_insn *p;
2637 tree decl;
2639 for (p = insn; p; p = NEXT_INSN (p))
2640 if (INSN_P (p))
2642 reset_used_flags (PATTERN (p));
2643 reset_used_flags (REG_NOTES (p));
2644 if (CALL_P (p))
2645 reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
2648 /* Make sure that virtual stack slots are not shared. */
2649 set_used_decls (DECL_INITIAL (cfun->decl));
2651 /* Make sure that virtual parameters are not shared. */
2652 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
2653 set_used_flags (DECL_RTL (decl));
2655 reset_used_flags (stack_slot_list);
2657 unshare_all_rtl_1 (insn);
2660 unsigned int
2661 unshare_all_rtl (void)
2663 unshare_all_rtl_1 (get_insns ());
2664 return 0;
2668 /* Check that ORIG is not marked when it should not be and mark ORIG as in use.
2669 Recursively does the same for subexpressions. */
2671 static void
2672 verify_rtx_sharing (rtx orig, rtx insn)
2674 rtx x = orig;
2675 int i;
2676 enum rtx_code code;
2677 const char *format_ptr;
2679 if (x == 0)
2680 return;
2682 code = GET_CODE (x);
2684 /* These types may be freely shared. */
2686 switch (code)
2688 case REG:
2689 case DEBUG_EXPR:
2690 case VALUE:
2691 CASE_CONST_ANY:
2692 case SYMBOL_REF:
2693 case LABEL_REF:
2694 case CODE_LABEL:
2695 case PC:
2696 case CC0:
2697 case RETURN:
2698 case SIMPLE_RETURN:
2699 case SCRATCH:
2700 /* SCRATCH rtxes must be shared because they represent distinct values. */
2701 return;
2702 case CLOBBER:
2703 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
2704 clobbers or clobbers of hard registers that originated as pseudos.
2705 This is needed to allow safe register renaming. */
2706 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2707 && ORIGINAL_REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 0)))
2708 return;
2709 break;
2711 case CONST:
2712 if (shared_const_p (orig))
2713 return;
2714 break;
2716 case MEM:
2717 /* A MEM is allowed to be shared if its address is constant. */
2718 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2719 || reload_completed || reload_in_progress)
2720 return;
2722 break;
2724 default:
2725 break;
2728 /* This rtx may not be shared. If it has already been seen,
2729 replace it with a copy of itself. */
2730 if (flag_checking && RTX_FLAG (x, used))
2732 error ("invalid rtl sharing found in the insn");
2733 debug_rtx (insn);
2734 error ("shared rtx");
2735 debug_rtx (x);
2736 internal_error ("internal consistency failure");
2738 gcc_assert (!RTX_FLAG (x, used));
2740 RTX_FLAG (x, used) = 1;
2742 /* Now scan the subexpressions recursively. */
2744 format_ptr = GET_RTX_FORMAT (code);
2746 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2748 switch (*format_ptr++)
2750 case 'e':
2751 verify_rtx_sharing (XEXP (x, i), insn);
2752 break;
2754 case 'E':
2755 if (XVEC (x, i) != NULL)
2757 int j;
2758 int len = XVECLEN (x, i);
2760 for (j = 0; j < len; j++)
2762 /* We allow sharing of ASM_OPERANDS inside a single
2763 instruction. */
2764 if (j && GET_CODE (XVECEXP (x, i, j)) == SET
2765 && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2766 == ASM_OPERANDS))
2767 verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2768 else
2769 verify_rtx_sharing (XVECEXP (x, i, j), insn);
2772 break;
2775 return;
2778 /* Reset used-flags for INSN. */
2780 static void
2781 reset_insn_used_flags (rtx insn)
2783 gcc_assert (INSN_P (insn));
2784 reset_used_flags (PATTERN (insn));
2785 reset_used_flags (REG_NOTES (insn));
2786 if (CALL_P (insn))
2787 reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn));
2790 /* Go through all the RTL insn bodies and clear all the USED bits. */
2792 static void
2793 reset_all_used_flags (void)
2795 rtx_insn *p;
2797 for (p = get_insns (); p; p = NEXT_INSN (p))
2798 if (INSN_P (p))
2800 rtx pat = PATTERN (p);
2801 if (GET_CODE (pat) != SEQUENCE)
2802 reset_insn_used_flags (p);
2803 else
2805 gcc_assert (REG_NOTES (p) == NULL);
2806 for (int i = 0; i < XVECLEN (pat, 0); i++)
2808 rtx insn = XVECEXP (pat, 0, i);
2809 if (INSN_P (insn))
2810 reset_insn_used_flags (insn);
2816 /* Verify sharing in INSN. */
2818 static void
2819 verify_insn_sharing (rtx insn)
2821 gcc_assert (INSN_P (insn));
2822 reset_used_flags (PATTERN (insn));
2823 reset_used_flags (REG_NOTES (insn));
2824 if (CALL_P (insn))
2825 reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn));
2828 /* Go through all the RTL insn bodies and check that there is no unexpected
2829 sharing in between the subexpressions. */
2831 DEBUG_FUNCTION void
2832 verify_rtl_sharing (void)
2834 rtx_insn *p;
2836 timevar_push (TV_VERIFY_RTL_SHARING);
2838 reset_all_used_flags ();
2840 for (p = get_insns (); p; p = NEXT_INSN (p))
2841 if (INSN_P (p))
2843 rtx pat = PATTERN (p);
2844 if (GET_CODE (pat) != SEQUENCE)
2845 verify_insn_sharing (p);
2846 else
2847 for (int i = 0; i < XVECLEN (pat, 0); i++)
2849 rtx insn = XVECEXP (pat, 0, i);
2850 if (INSN_P (insn))
2851 verify_insn_sharing (insn);
2855 reset_all_used_flags ();
2857 timevar_pop (TV_VERIFY_RTL_SHARING);
2860 /* Go through all the RTL insn bodies and copy any invalid shared structure.
2861 Assumes the mark bits are cleared at entry. */
2863 void
2864 unshare_all_rtl_in_chain (rtx_insn *insn)
2866 for (; insn; insn = NEXT_INSN (insn))
2867 if (INSN_P (insn))
2869 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2870 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2871 if (CALL_P (insn))
2872 CALL_INSN_FUNCTION_USAGE (insn)
2873 = copy_rtx_if_shared (CALL_INSN_FUNCTION_USAGE (insn));
2877 /* Go through all virtual stack slots of a function and mark them as
2878 shared. We never replace the DECL_RTLs themselves with a copy,
2879 but expressions mentioned into a DECL_RTL cannot be shared with
2880 expressions in the instruction stream.
2882 Note that reload may convert pseudo registers into memories in-place.
2883 Pseudo registers are always shared, but MEMs never are. Thus if we
2884 reset the used flags on MEMs in the instruction stream, we must set
2885 them again on MEMs that appear in DECL_RTLs. */
2887 static void
2888 set_used_decls (tree blk)
2890 tree t;
2892 /* Mark decls. */
2893 for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
2894 if (DECL_RTL_SET_P (t))
2895 set_used_flags (DECL_RTL (t));
2897 /* Now process sub-blocks. */
2898 for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
2899 set_used_decls (t);
2902 /* Mark ORIG as in use, and return a copy of it if it was already in use.
2903 Recursively does the same for subexpressions. Uses
2904 copy_rtx_if_shared_1 to reduce stack space. */
2907 copy_rtx_if_shared (rtx orig)
2909 copy_rtx_if_shared_1 (&orig);
2910 return orig;
2913 /* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
2914 use. Recursively does the same for subexpressions. */
2916 static void
2917 copy_rtx_if_shared_1 (rtx *orig1)
2919 rtx x;
2920 int i;
2921 enum rtx_code code;
2922 rtx *last_ptr;
2923 const char *format_ptr;
2924 int copied = 0;
2925 int length;
2927 /* Repeat is used to turn tail-recursion into iteration. */
2928 repeat:
2929 x = *orig1;
2931 if (x == 0)
2932 return;
2934 code = GET_CODE (x);
2936 /* These types may be freely shared. */
2938 switch (code)
2940 case REG:
2941 case DEBUG_EXPR:
2942 case VALUE:
2943 CASE_CONST_ANY:
2944 case SYMBOL_REF:
2945 case LABEL_REF:
2946 case CODE_LABEL:
2947 case PC:
2948 case CC0:
2949 case RETURN:
2950 case SIMPLE_RETURN:
2951 case SCRATCH:
2952 /* SCRATCH rtxes must be shared because they represent distinct values. */
2953 return;
2954 case CLOBBER:
2955 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
2956 clobbers or clobbers of hard registers that originated as pseudos.
2957 This is needed to allow safe register renaming. */
2958 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2959 && ORIGINAL_REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 0)))
2960 return;
2961 break;
2963 case CONST:
2964 if (shared_const_p (x))
2965 return;
2966 break;
2968 case DEBUG_INSN:
2969 case INSN:
2970 case JUMP_INSN:
2971 case CALL_INSN:
2972 case NOTE:
2973 case BARRIER:
2974 /* The chain of insns is not being copied. */
2975 return;
2977 default:
2978 break;
2981 /* This rtx may not be shared. If it has already been seen,
2982 replace it with a copy of itself. */
2984 if (RTX_FLAG (x, used))
2986 x = shallow_copy_rtx (x);
2987 copied = 1;
2989 RTX_FLAG (x, used) = 1;
2991 /* Now scan the subexpressions recursively.
2992 We can store any replaced subexpressions directly into X
2993 since we know X is not shared! Any vectors in X
2994 must be copied if X was copied. */
2996 format_ptr = GET_RTX_FORMAT (code);
2997 length = GET_RTX_LENGTH (code);
2998 last_ptr = NULL;
3000 for (i = 0; i < length; i++)
3002 switch (*format_ptr++)
3004 case 'e':
3005 if (last_ptr)
3006 copy_rtx_if_shared_1 (last_ptr);
3007 last_ptr = &XEXP (x, i);
3008 break;
3010 case 'E':
3011 if (XVEC (x, i) != NULL)
3013 int j;
3014 int len = XVECLEN (x, i);
3016 /* Copy the vector iff I copied the rtx and the length
3017 is nonzero. */
3018 if (copied && len > 0)
3019 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
3021 /* Call recursively on all inside the vector. */
3022 for (j = 0; j < len; j++)
3024 if (last_ptr)
3025 copy_rtx_if_shared_1 (last_ptr);
3026 last_ptr = &XVECEXP (x, i, j);
3029 break;
3032 *orig1 = x;
3033 if (last_ptr)
3035 orig1 = last_ptr;
3036 goto repeat;
3038 return;
3041 /* Set the USED bit in X and its non-shareable subparts to FLAG. */
3043 static void
3044 mark_used_flags (rtx x, int flag)
3046 int i, j;
3047 enum rtx_code code;
3048 const char *format_ptr;
3049 int length;
3051 /* Repeat is used to turn tail-recursion into iteration. */
3052 repeat:
3053 if (x == 0)
3054 return;
3056 code = GET_CODE (x);
3058 /* These types may be freely shared so we needn't do any resetting
3059 for them. */
3061 switch (code)
3063 case REG:
3064 case DEBUG_EXPR:
3065 case VALUE:
3066 CASE_CONST_ANY:
3067 case SYMBOL_REF:
3068 case CODE_LABEL:
3069 case PC:
3070 case CC0:
3071 case RETURN:
3072 case SIMPLE_RETURN:
3073 return;
3075 case DEBUG_INSN:
3076 case INSN:
3077 case JUMP_INSN:
3078 case CALL_INSN:
3079 case NOTE:
3080 case LABEL_REF:
3081 case BARRIER:
3082 /* The chain of insns is not being copied. */
3083 return;
3085 default:
3086 break;
3089 RTX_FLAG (x, used) = flag;
3091 format_ptr = GET_RTX_FORMAT (code);
3092 length = GET_RTX_LENGTH (code);
3094 for (i = 0; i < length; i++)
3096 switch (*format_ptr++)
3098 case 'e':
3099 if (i == length-1)
3101 x = XEXP (x, i);
3102 goto repeat;
3104 mark_used_flags (XEXP (x, i), flag);
3105 break;
3107 case 'E':
3108 for (j = 0; j < XVECLEN (x, i); j++)
3109 mark_used_flags (XVECEXP (x, i, j), flag);
3110 break;
3115 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
3116 to look for shared sub-parts. */
3118 void
3119 reset_used_flags (rtx x)
3121 mark_used_flags (x, 0);
3124 /* Set all the USED bits in X to allow copy_rtx_if_shared to be used
3125 to look for shared sub-parts. */
3127 void
3128 set_used_flags (rtx x)
3130 mark_used_flags (x, 1);
3133 /* Copy X if necessary so that it won't be altered by changes in OTHER.
3134 Return X or the rtx for the pseudo reg the value of X was copied into.
3135 OTHER must be valid as a SET_DEST. */
3138 make_safe_from (rtx x, rtx other)
3140 while (1)
3141 switch (GET_CODE (other))
3143 case SUBREG:
3144 other = SUBREG_REG (other);
3145 break;
3146 case STRICT_LOW_PART:
3147 case SIGN_EXTEND:
3148 case ZERO_EXTEND:
3149 other = XEXP (other, 0);
3150 break;
3151 default:
3152 goto done;
3154 done:
3155 if ((MEM_P (other)
3156 && ! CONSTANT_P (x)
3157 && !REG_P (x)
3158 && GET_CODE (x) != SUBREG)
3159 || (REG_P (other)
3160 && (REGNO (other) < FIRST_PSEUDO_REGISTER
3161 || reg_mentioned_p (other, x))))
3163 rtx temp = gen_reg_rtx (GET_MODE (x));
3164 emit_move_insn (temp, x);
3165 return temp;
3167 return x;
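/* Illustrative sketch, not part of the original file: before emitting a
   store into DEST, an expander can launder a source value that the
   store might clobber.  example_store_safely is a made-up name.  */

static void
example_store_safely (rtx dest, rtx src)
{
  /* make_safe_from may copy SRC into a fresh pseudo.  */
  src = make_safe_from (src, dest);
  emit_move_insn (dest, src);
}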
3170 /* Emission of insns (adding them to the doubly-linked list). */
3172 /* Return the last insn emitted, even if it is in a sequence now pushed. */
3174 rtx_insn *
3175 get_last_insn_anywhere (void)
3177 struct sequence_stack *seq;
3178 for (seq = get_current_sequence (); seq; seq = seq->next)
3179 if (seq->last != 0)
3180 return seq->last;
3181 return 0;
3184 /* Return the first nonnote insn emitted in current sequence or current
3185 function. This routine looks inside SEQUENCEs. */
3187 rtx_insn *
3188 get_first_nonnote_insn (void)
3190 rtx_insn *insn = get_insns ();
3192 if (insn)
3194 if (NOTE_P (insn))
3195 for (insn = next_insn (insn);
3196 insn && NOTE_P (insn);
3197 insn = next_insn (insn))
3198 continue;
3199 else
3201 if (NONJUMP_INSN_P (insn)
3202 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3203 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3207 return insn;
3210 /* Return the last nonnote insn emitted in current sequence or current
3211 function. This routine looks inside SEQUENCEs. */
3213 rtx_insn *
3214 get_last_nonnote_insn (void)
3216 rtx_insn *insn = get_last_insn ();
3218 if (insn)
3220 if (NOTE_P (insn))
3221 for (insn = previous_insn (insn);
3222 insn && NOTE_P (insn);
3223 insn = previous_insn (insn))
3224 continue;
3225 else
3227 if (NONJUMP_INSN_P (insn))
3228 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
3229 insn = seq->insn (seq->len () - 1);
3233 return insn;
3236 /* Return the number of actual (non-debug) insns emitted in this
3237 function. */
3240 get_max_insn_count (void)
3242 int n = cur_insn_uid;
3244 /* The table size must be stable across -g, to avoid codegen
3245 differences due to debug insns, and not be affected by
3246 -fmin-insn-uid, to avoid excessive table size and to simplify
3247 debugging of -fcompare-debug failures. */
3248 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3249 n -= cur_debug_insn_uid;
3250 else
3251 n -= MIN_NONDEBUG_INSN_UID;
3253 return n;
3257 /* Return the next insn. If it is a SEQUENCE, return the first insn
3258 of the sequence. */
3260 rtx_insn *
3261 next_insn (rtx_insn *insn)
3263 if (insn)
3265 insn = NEXT_INSN (insn);
3266 if (insn && NONJUMP_INSN_P (insn)
3267 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3268 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3271 return insn;
3274 /* Return the previous insn. If it is a SEQUENCE, return the last insn
3275 of the sequence. */
3277 rtx_insn *
3278 previous_insn (rtx_insn *insn)
3280 if (insn)
3282 insn = PREV_INSN (insn);
3283 if (insn && NONJUMP_INSN_P (insn))
3284 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
3285 insn = seq->insn (seq->len () - 1);
3288 return insn;
3291 /* Return the next insn after INSN that is not a NOTE. This routine does not
3292 look inside SEQUENCEs. */
3294 rtx_insn *
3295 next_nonnote_insn (rtx uncast_insn)
3297 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3298 while (insn)
3300 insn = NEXT_INSN (insn);
3301 if (insn == 0 || !NOTE_P (insn))
3302 break;
3305 return insn;
3308 /* Return the next insn after INSN that is not a NOTE, but stop the
3309 search before we enter another basic block. This routine does not
3310 look inside SEQUENCEs. */
3312 rtx_insn *
3313 next_nonnote_insn_bb (rtx_insn *insn)
3315 while (insn)
3317 insn = NEXT_INSN (insn);
3318 if (insn == 0 || !NOTE_P (insn))
3319 break;
3320 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3321 return NULL;
3324 return insn;
3327 /* Return the previous insn before INSN that is not a NOTE. This routine does
3328 not look inside SEQUENCEs. */
3330 rtx_insn *
3331 prev_nonnote_insn (rtx uncast_insn)
3333 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3335 while (insn)
3337 insn = PREV_INSN (insn);
3338 if (insn == 0 || !NOTE_P (insn))
3339 break;
3342 return insn;
3345 /* Return the previous insn before INSN that is not a NOTE, but stop
3346 the search before we enter another basic block. This routine does
3347 not look inside SEQUENCEs. */
3349 rtx_insn *
3350 prev_nonnote_insn_bb (rtx uncast_insn)
3352 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3354 while (insn)
3356 insn = PREV_INSN (insn);
3357 if (insn == 0 || !NOTE_P (insn))
3358 break;
3359 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3360 return NULL;
3363 return insn;
3366 /* Return the next insn after INSN that is not a DEBUG_INSN. This
3367 routine does not look inside SEQUENCEs. */
3369 rtx_insn *
3370 next_nondebug_insn (rtx uncast_insn)
3372 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3374 while (insn)
3376 insn = NEXT_INSN (insn);
3377 if (insn == 0 || !DEBUG_INSN_P (insn))
3378 break;
3381 return insn;
3384 /* Return the previous insn before INSN that is not a DEBUG_INSN.
3385 This routine does not look inside SEQUENCEs. */
3387 rtx_insn *
3388 prev_nondebug_insn (rtx uncast_insn)
3390 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3392 while (insn)
3394 insn = PREV_INSN (insn);
3395 if (insn == 0 || !DEBUG_INSN_P (insn))
3396 break;
3399 return insn;
3402 /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
3403 This routine does not look inside SEQUENCEs. */
3405 rtx_insn *
3406 next_nonnote_nondebug_insn (rtx uncast_insn)
3408 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3410 while (insn)
3412 insn = NEXT_INSN (insn);
3413 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3414 break;
3417 return insn;
3420 /* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
3421 This routine does not look inside SEQUENCEs. */
3423 rtx_insn *
3424 prev_nonnote_nondebug_insn (rtx uncast_insn)
3426 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3428 while (insn)
3430 insn = PREV_INSN (insn);
3431 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3432 break;
3435 return insn;
3438 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3439 or 0, if there is none. This routine does not look inside
3440 SEQUENCEs. */
3442 rtx_insn *
3443 next_real_insn (rtx uncast_insn)
3445 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3447 while (insn)
3449 insn = NEXT_INSN (insn);
3450 if (insn == 0 || INSN_P (insn))
3451 break;
3454 return insn;
3457 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3458 or 0, if there is none. This routine does not look inside
3459 SEQUENCEs. */
3461 rtx_insn *
3462 prev_real_insn (rtx uncast_insn)
3464 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3466 while (insn)
3468 insn = PREV_INSN (insn);
3469 if (insn == 0 || INSN_P (insn))
3470 break;
3473 return insn;
3476 /* Return the last CALL_INSN in the current list, or 0 if there is none.
3477 This routine does not look inside SEQUENCEs. */
3479 rtx_call_insn *
3480 last_call_insn (void)
3482 rtx_insn *insn;
3484 for (insn = get_last_insn ();
3485 insn && !CALL_P (insn);
3486 insn = PREV_INSN (insn))
3489 return safe_as_a <rtx_call_insn *> (insn);
3492 /* Find the next insn after INSN that really does something. This routine
3493 does not look inside SEQUENCEs. After reload this also skips over
3494 standalone USE and CLOBBER insns. */
3497 active_insn_p (const_rtx insn)
3499 return (CALL_P (insn) || JUMP_P (insn)
3500 || JUMP_TABLE_DATA_P (insn) /* FIXME */
3501 || (NONJUMP_INSN_P (insn)
3502 && (! reload_completed
3503 || (GET_CODE (PATTERN (insn)) != USE
3504 && GET_CODE (PATTERN (insn)) != CLOBBER))));
3507 rtx_insn *
3508 next_active_insn (rtx uncast_insn)
3510 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3512 while (insn)
3514 insn = NEXT_INSN (insn);
3515 if (insn == 0 || active_insn_p (insn))
3516 break;
3519 return insn;
3522 /* Find the last insn before INSN that really does something. This routine
3523 does not look inside SEQUENCEs. After reload this also skips over
3524 standalone USE and CLOBBER insns. */
3526 rtx_insn *
3527 prev_active_insn (rtx uncast_insn)
3529 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3531 while (insn)
3533 insn = PREV_INSN (insn);
3534 if (insn == 0 || active_insn_p (insn))
3535 break;
3538 return insn;
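/* Illustrative sketch, not part of the original file: a typical scan
   uses these walkers to skip notes and debug insns, here counting the
   active insns that follow INSN in the chain.
   example_count_following_active_insns is a made-up name.  */

static int
example_count_following_active_insns (rtx_insn *insn)
{
  int n = 0;
  for (insn = next_nonnote_nondebug_insn (insn);
       insn != NULL;
       insn = next_nonnote_nondebug_insn (insn))
    if (active_insn_p (insn))
      n++;
  return n;
}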
3541 /* Return the next insn that uses CC0 after INSN, which is assumed to
3542 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3543 applied to the result of this function should yield INSN).
3545 Normally, this is simply the next insn. However, if a REG_CC_USER note
3546 is present, it contains the insn that uses CC0.
3548 Return 0 if we can't find the insn. */
3550 rtx_insn *
3551 next_cc0_user (rtx uncast_insn)
3553 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3555 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3557 if (note)
3558 return safe_as_a <rtx_insn *> (XEXP (note, 0));
3560 insn = next_nonnote_insn (insn);
3561 if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3562 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3564 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3565 return insn;
3567 return 0;
3570 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3571 note, it is the previous insn. */
3573 rtx_insn *
3574 prev_cc0_setter (rtx_insn *insn)
3576 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3578 if (note)
3579 return safe_as_a <rtx_insn *> (XEXP (note, 0));
3581 insn = prev_nonnote_insn (insn);
3582 gcc_assert (sets_cc0_p (PATTERN (insn)));
3584 return insn;
3587 /* Find a RTX_AUTOINC class rtx which matches DATA. */
3589 static int
3590 find_auto_inc (const_rtx x, const_rtx reg)
3592 subrtx_iterator::array_type array;
3593 FOR_EACH_SUBRTX (iter, array, x, NONCONST)
3595 const_rtx x = *iter;
3596 if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC
3597 && rtx_equal_p (reg, XEXP (x, 0)))
3598 return true;
3600 return false;
3603 /* Increment the label uses for all labels present in rtx. */
3605 static void
3606 mark_label_nuses (rtx x)
3608 enum rtx_code code;
3609 int i, j;
3610 const char *fmt;
3612 code = GET_CODE (x);
3613 if (code == LABEL_REF && LABEL_P (LABEL_REF_LABEL (x)))
3614 LABEL_NUSES (LABEL_REF_LABEL (x))++;
3616 fmt = GET_RTX_FORMAT (code);
3617 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3619 if (fmt[i] == 'e')
3620 mark_label_nuses (XEXP (x, i));
3621 else if (fmt[i] == 'E')
3622 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3623 mark_label_nuses (XVECEXP (x, i, j));
3628 /* Try splitting insns that can be split for better scheduling.
3629 PAT is the pattern which might split.
3630 TRIAL is the insn providing PAT.
3631 LAST is nonzero if we should return the last insn of the sequence produced.
3633 If this routine succeeds in splitting, it returns the first or last
3634 replacement insn depending on the value of LAST. Otherwise, it
3635 returns TRIAL. If the insn to be returned can be split, it will be. */
3637 rtx_insn *
3638 try_split (rtx pat, rtx_insn *trial, int last)
3640 rtx_insn *before = PREV_INSN (trial);
3641 rtx_insn *after = NEXT_INSN (trial);
3642 rtx note;
3643 rtx_insn *seq, *tem;
3644 int probability;
3645 rtx_insn *insn_last, *insn;
3646 int njumps = 0;
3647 rtx_insn *call_insn = NULL;
3649 /* We're not good at redistributing frame information. */
3650 if (RTX_FRAME_RELATED_P (trial))
3651 return trial;
3653 if (any_condjump_p (trial)
3654 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3655 split_branch_probability = XINT (note, 0);
3656 probability = split_branch_probability;
3658 seq = split_insns (pat, trial);
3660 split_branch_probability = -1;
3662 if (!seq)
3663 return trial;
3665 /* Avoid infinite loop if any insn of the result matches
3666 the original pattern. */
3667 insn_last = seq;
3668 while (1)
3670 if (INSN_P (insn_last)
3671 && rtx_equal_p (PATTERN (insn_last), pat))
3672 return trial;
3673 if (!NEXT_INSN (insn_last))
3674 break;
3675 insn_last = NEXT_INSN (insn_last);
3678 /* We will be adding the new sequence to the function. The splitters
3679 may have introduced invalid RTL sharing, so unshare the sequence now. */
3680 unshare_all_rtl_in_chain (seq);
3682 /* Mark labels and copy flags. */
3683 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3685 if (JUMP_P (insn))
3687 if (JUMP_P (trial))
3688 CROSSING_JUMP_P (insn) = CROSSING_JUMP_P (trial);
3689 mark_jump_label (PATTERN (insn), insn, 0);
3690 njumps++;
3691 if (probability != -1
3692 && any_condjump_p (insn)
3693 && !find_reg_note (insn, REG_BR_PROB, 0))
3695 /* We can preserve the REG_BR_PROB notes only if exactly
3696 one jump is created, otherwise the machine description
3697 is responsible for this step using
3698 split_branch_probability variable. */
3699 gcc_assert (njumps == 1);
3700 add_int_reg_note (insn, REG_BR_PROB, probability);
3705 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3706 in SEQ and copy any additional information across. */
3707 if (CALL_P (trial))
3709 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3710 if (CALL_P (insn))
3712 rtx_insn *next;
3713 rtx *p;
3715 gcc_assert (call_insn == NULL_RTX);
3716 call_insn = insn;
3718 /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the
3719 target may have explicitly specified. */
3720 p = &CALL_INSN_FUNCTION_USAGE (insn);
3721 while (*p)
3722 p = &XEXP (*p, 1);
3723 *p = CALL_INSN_FUNCTION_USAGE (trial);
3725 /* If the old call was a sibling call, the new one must
3726 be too. */
3727 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
3729 /* If the new call is the last instruction in the sequence,
3730 it will effectively replace the old call in-situ. Otherwise
3731 we must move any following NOTE_INSN_CALL_ARG_LOCATION note
3732 so that it comes immediately after the new call. */
3733 if (NEXT_INSN (insn))
3734 for (next = NEXT_INSN (trial);
3735 next && NOTE_P (next);
3736 next = NEXT_INSN (next))
3737 if (NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION)
3739 remove_insn (next);
3740 add_insn_after (next, insn, NULL);
3741 break;
3746 /* Copy notes, particularly those related to the CFG. */
3747 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3749 switch (REG_NOTE_KIND (note))
3751 case REG_EH_REGION:
3752 copy_reg_eh_region_note_backward (note, insn_last, NULL);
3753 break;
3755 case REG_NORETURN:
3756 case REG_SETJMP:
3757 case REG_TM:
3758 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3760 if (CALL_P (insn))
3761 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3763 break;
3765 case REG_NON_LOCAL_GOTO:
3766 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3768 if (JUMP_P (insn))
3769 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3771 break;
3773 case REG_INC:
3774 if (!AUTO_INC_DEC)
3775 break;
3777 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3779 rtx reg = XEXP (note, 0);
3780 if (!FIND_REG_INC_NOTE (insn, reg)
3781 && find_auto_inc (PATTERN (insn), reg))
3782 add_reg_note (insn, REG_INC, reg);
3784 break;
3786 case REG_ARGS_SIZE:
3787 fixup_args_size_notes (NULL, insn_last, INTVAL (XEXP (note, 0)));
3788 break;
3790 case REG_CALL_DECL:
3791 gcc_assert (call_insn != NULL_RTX);
3792 add_reg_note (call_insn, REG_NOTE_KIND (note), XEXP (note, 0));
3793 break;
3795 default:
3796 break;
3800 /* If there are LABELS inside the split insns increment the
3801 usage count so we don't delete the label. */
3802 if (INSN_P (trial))
3804 insn = insn_last;
3805 while (insn != NULL_RTX)
3807 /* JUMP_P insns have already been "marked" above. */
3808 if (NONJUMP_INSN_P (insn))
3809 mark_label_nuses (PATTERN (insn));
3811 insn = PREV_INSN (insn);
3815 tem = emit_insn_after_setloc (seq, trial, INSN_LOCATION (trial));
3817 delete_insn (trial);
3819 /* Recursively call try_split for each new insn created; by the
3820 time control returns here that insn will be fully split, so
3821 set LAST and continue from the insn after the one returned.
3822 We can't use next_active_insn here since AFTER may be a note.
3823 Ignore deleted insns, which can occur if not optimizing. */
3824 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3825 if (! tem->deleted () && INSN_P (tem))
3826 tem = try_split (PATTERN (tem), tem, 1);
3828 /* Return either the first or the last insn, depending on which was
3829 requested. */
3830 return last
3831 ? (after ? PREV_INSN (after) : get_last_insn ())
3832 : NEXT_INSN (before);
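/* Illustrative sketch, not part of the original file: a split pass hands
   try_split an insn's own pattern and keeps scanning from the returned
   insn; when no splitter matches, the insn comes back unchanged.
   example_split_one_insn is a made-up name.  */

static rtx_insn *
example_split_one_insn (rtx_insn *insn)
{
  /* LAST = 1 asks for the last insn of the replacement sequence.  */
  return try_split (PATTERN (insn), insn, 1);
}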
3835 /* Make and return an INSN rtx, initializing all its slots.
3836 Store PATTERN in the pattern slots. */
3838 rtx_insn *
3839 make_insn_raw (rtx pattern)
3841 rtx_insn *insn;
3843 insn = as_a <rtx_insn *> (rtx_alloc (INSN));
3845 INSN_UID (insn) = cur_insn_uid++;
3846 PATTERN (insn) = pattern;
3847 INSN_CODE (insn) = -1;
3848 REG_NOTES (insn) = NULL;
3849 INSN_LOCATION (insn) = curr_insn_location ();
3850 BLOCK_FOR_INSN (insn) = NULL;
3852 #ifdef ENABLE_RTL_CHECKING
3853 if (insn
3854 && INSN_P (insn)
3855 && (returnjump_p (insn)
3856 || (GET_CODE (insn) == SET
3857 && SET_DEST (insn) == pc_rtx)))
3859 warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
3860 debug_rtx (insn);
3862 #endif
3864 return insn;
3867 /* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn. */
3869 static rtx_insn *
3870 make_debug_insn_raw (rtx pattern)
3872 rtx_debug_insn *insn;
3874 insn = as_a <rtx_debug_insn *> (rtx_alloc (DEBUG_INSN));
3875 INSN_UID (insn) = cur_debug_insn_uid++;
3876 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3877 INSN_UID (insn) = cur_insn_uid++;
3879 PATTERN (insn) = pattern;
3880 INSN_CODE (insn) = -1;
3881 REG_NOTES (insn) = NULL;
3882 INSN_LOCATION (insn) = curr_insn_location ();
3883 BLOCK_FOR_INSN (insn) = NULL;
3885 return insn;
3888 /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
3890 static rtx_insn *
3891 make_jump_insn_raw (rtx pattern)
3893 rtx_jump_insn *insn;
3895 insn = as_a <rtx_jump_insn *> (rtx_alloc (JUMP_INSN));
3896 INSN_UID (insn) = cur_insn_uid++;
3898 PATTERN (insn) = pattern;
3899 INSN_CODE (insn) = -1;
3900 REG_NOTES (insn) = NULL;
3901 JUMP_LABEL (insn) = NULL;
3902 INSN_LOCATION (insn) = curr_insn_location ();
3903 BLOCK_FOR_INSN (insn) = NULL;
3905 return insn;
3908 /* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
3910 static rtx_insn *
3911 make_call_insn_raw (rtx pattern)
3913 rtx_call_insn *insn;
3915 insn = as_a <rtx_call_insn *> (rtx_alloc (CALL_INSN));
3916 INSN_UID (insn) = cur_insn_uid++;
3918 PATTERN (insn) = pattern;
3919 INSN_CODE (insn) = -1;
3920 REG_NOTES (insn) = NULL;
3921 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
3922 INSN_LOCATION (insn) = curr_insn_location ();
3923 BLOCK_FOR_INSN (insn) = NULL;
3925 return insn;
3928 /* Like `make_insn_raw' but make a NOTE instead of an insn. */
3930 static rtx_note *
3931 make_note_raw (enum insn_note subtype)
3933 /* Some notes are never created this way at all. These notes are
3934 only created by patching out insns. */
3935 gcc_assert (subtype != NOTE_INSN_DELETED_LABEL
3936 && subtype != NOTE_INSN_DELETED_DEBUG_LABEL);
3938 rtx_note *note = as_a <rtx_note *> (rtx_alloc (NOTE));
3939 INSN_UID (note) = cur_insn_uid++;
3940 NOTE_KIND (note) = subtype;
3941 BLOCK_FOR_INSN (note) = NULL;
3942 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
3943 return note;
3946 /* Add INSN to the end of the doubly-linked list, between PREV and NEXT.
3947 INSN may be any object that can appear in the chain: INSN_P and NOTE_P objects,
3948 but also BARRIERs and JUMP_TABLE_DATAs. PREV and NEXT may be NULL. */
3950 static inline void
3951 link_insn_into_chain (rtx_insn *insn, rtx_insn *prev, rtx_insn *next)
3953 SET_PREV_INSN (insn) = prev;
3954 SET_NEXT_INSN (insn) = next;
3955 if (prev != NULL)
3957 SET_NEXT_INSN (prev) = insn;
3958 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3960 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
3961 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = insn;
3964 if (next != NULL)
3966 SET_PREV_INSN (next) = insn;
3967 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3969 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
3970 SET_PREV_INSN (sequence->insn (0)) = insn;
3974 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3976 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (insn));
3977 SET_PREV_INSN (sequence->insn (0)) = prev;
3978 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
3982 /* Add INSN to the end of the doubly-linked list.
3983 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
3985 void
3986 add_insn (rtx_insn *insn)
3988 rtx_insn *prev = get_last_insn ();
3989 link_insn_into_chain (insn, prev, NULL);
3990 if (NULL == get_insns ())
3991 set_first_insn (insn);
3992 set_last_insn (insn);
3995 /* Add INSN into the doubly-linked list after insn AFTER. */
3997 static void
3998 add_insn_after_nobb (rtx_insn *insn, rtx_insn *after)
4000 rtx_insn *next = NEXT_INSN (after);
4002 gcc_assert (!optimize || !after->deleted ());
4004 link_insn_into_chain (insn, after, next);
4006 if (next == NULL)
4008 struct sequence_stack *seq;
4010 for (seq = get_current_sequence (); seq; seq = seq->next)
4011 if (after == seq->last)
4013 seq->last = insn;
4014 break;
4019 /* Add INSN into the doubly-linked list before insn BEFORE. */
4021 static void
4022 add_insn_before_nobb (rtx_insn *insn, rtx_insn *before)
4024 rtx_insn *prev = PREV_INSN (before);
4026 gcc_assert (!optimize || !before->deleted ());
4028 link_insn_into_chain (insn, prev, before);
4030 if (prev == NULL)
4032 struct sequence_stack *seq;
4034 for (seq = get_current_sequence (); seq; seq = seq->next)
4035 if (before == seq->first)
4037 seq->first = insn;
4038 break;
4041 gcc_assert (seq);
4045 /* Like add_insn_after_nobb, but try to set BLOCK_FOR_INSN.
4046 If BB is NULL, an attempt is made to infer the bb from AFTER.
4048 This and the next function should be the only functions called
4049 to insert an insn once delay slots have been filled since only
4050 they know how to update a SEQUENCE. */
4052 void
4053 add_insn_after (rtx uncast_insn, rtx uncast_after, basic_block bb)
4055 rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
4056 rtx_insn *after = as_a <rtx_insn *> (uncast_after);
4057 add_insn_after_nobb (insn, after);
4058 if (!BARRIER_P (after)
4059 && !BARRIER_P (insn)
4060 && (bb = BLOCK_FOR_INSN (after)))
4062 set_block_for_insn (insn, bb);
4063 if (INSN_P (insn))
4064 df_insn_rescan (insn);
4065 /* Should not happen, as the first insn in the BB is always
4066 either a NOTE or a LABEL. */
4067 if (BB_END (bb) == after
4068 /* Avoid clobbering of structure when creating new BB. */
4069 && !BARRIER_P (insn)
4070 && !NOTE_INSN_BASIC_BLOCK_P (insn))
4071 BB_END (bb) = insn;
4075 /* Like add_insn_before_nobb, but try to set BLOCK_FOR_INSN.
4076 If BB is NULL, an attempt is made to infer the bb from before.
4078 This and the previous function should be the only functions called
4079 to insert an insn once delay slots have been filled since only
4080 they know how to update a SEQUENCE. */
4082 void
4083 add_insn_before (rtx uncast_insn, rtx uncast_before, basic_block bb)
4085 rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
4086 rtx_insn *before = as_a <rtx_insn *> (uncast_before);
4087 add_insn_before_nobb (insn, before);
4089 if (!bb
4090 && !BARRIER_P (before)
4091 && !BARRIER_P (insn))
4092 bb = BLOCK_FOR_INSN (before);
4094 if (bb)
4096 set_block_for_insn (insn, bb);
4097 if (INSN_P (insn))
4098 df_insn_rescan (insn);
4099 /* Should not happen, as the first insn in the BB is always either
4100 a NOTE or a LABEL. */
4101 gcc_assert (BB_HEAD (bb) != insn
4102 /* Avoid clobbering of structure when creating new BB. */
4103 || BARRIER_P (insn)
4104 || NOTE_INSN_BASIC_BLOCK_P (insn));
4108 /* Replace INSN with a deleted instruction note. */
4110 void
4111 set_insn_deleted (rtx insn)
4113 if (INSN_P (insn))
4114 df_insn_delete (as_a <rtx_insn *> (insn));
4115 PUT_CODE (insn, NOTE);
4116 NOTE_KIND (insn) = NOTE_INSN_DELETED;
4120 /* Unlink INSN from the insn chain.
4122 This function knows how to handle sequences.
4124 This function does not invalidate data flow information associated with
4125 INSN (i.e. does not call df_insn_delete). That makes this function
4126 usable for only disconnecting an insn from the chain, and re-emit it
4127 elsewhere later.
4129 To later insert INSN elsewhere in the insn chain via add_insn and
4130 similar functions, PREV_INSN and NEXT_INSN must be nullified by
4131 the caller. Nullifying them here breaks many insn chain walks.
4133 To really delete an insn and related DF information, use delete_insn. */
4135 void
4136 remove_insn (rtx uncast_insn)
4138 rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
4139 rtx_insn *next = NEXT_INSN (insn);
4140 rtx_insn *prev = PREV_INSN (insn);
4141 basic_block bb;
4143 if (prev)
4145 SET_NEXT_INSN (prev) = next;
4146 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
4148 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
4149 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
4152 else
4154 struct sequence_stack *seq;
4156 for (seq = get_current_sequence (); seq; seq = seq->next)
4157 if (insn == seq->first)
4159 seq->first = next;
4160 break;
4163 gcc_assert (seq);
4166 if (next)
4168 SET_PREV_INSN (next) = prev;
4169 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
4171 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
4172 SET_PREV_INSN (sequence->insn (0)) = prev;
4175 else
4177 struct sequence_stack *seq;
4179 for (seq = get_current_sequence (); seq; seq = seq->next)
4180 if (insn == seq->last)
4182 seq->last = prev;
4183 break;
4186 gcc_assert (seq);
4189 /* Fix up basic block boundaries, if necessary. */
4190 if (!BARRIER_P (insn)
4191 && (bb = BLOCK_FOR_INSN (insn)))
4193 if (BB_HEAD (bb) == insn)
4195 /* Never ever delete the basic block note without deleting the whole
4196 basic block. */
4197 gcc_assert (!NOTE_P (insn));
4198 BB_HEAD (bb) = next;
4200 if (BB_END (bb) == insn)
4201 BB_END (bb) = prev;
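/* Example: a rough sketch of detaching an insn with remove_insn and
   re-emitting it elsewhere, as described above.  INSN and NEW_POS are
   hypothetical; the key point is that the caller must clear PREV_INSN
   and NEXT_INSN before adding the insn back to the chain:

       remove_insn (insn);
       SET_PREV_INSN (insn) = NULL;
       SET_NEXT_INSN (insn) = NULL;
       add_insn_after (insn, new_pos, NULL);  */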
4205 /* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
4207 void
4208 add_function_usage_to (rtx call_insn, rtx call_fusage)
4210 gcc_assert (call_insn && CALL_P (call_insn));
4212 /* Put the register usage information on the CALL. If there is already
4213 some usage information, put ours at the end. */
4214 if (CALL_INSN_FUNCTION_USAGE (call_insn))
4216 rtx link;
4218 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
4219 link = XEXP (link, 1))
4222 XEXP (link, 1) = call_fusage;
4224 else
4225 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
4228 /* Delete all insns made since FROM.
4229 FROM becomes the new last instruction. */
4231 void
4232 delete_insns_since (rtx_insn *from)
4234 if (from == 0)
4235 set_first_insn (0);
4236 else
4237 SET_NEXT_INSN (from) = 0;
4238 set_last_insn (from);
4241 /* This function is deprecated; please use sequences instead.
4243 Move a consecutive bunch of insns to a different place in the chain.
4244 The insns to be moved are those between FROM and TO.
4245 They are moved to a new position after the insn AFTER.
4246 AFTER must not be FROM or TO or any insn in between.
4248 This function does not know about SEQUENCEs and hence should not be
4249 called after delay-slot filling has been done. */
4251 void
4252 reorder_insns_nobb (rtx_insn *from, rtx_insn *to, rtx_insn *after)
4254 if (flag_checking)
4256 for (rtx_insn *x = from; x != to; x = NEXT_INSN (x))
4257 gcc_assert (after != x);
4258 gcc_assert (after != to);
4261 /* Splice this bunch out of where it is now. */
4262 if (PREV_INSN (from))
4263 SET_NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
4264 if (NEXT_INSN (to))
4265 SET_PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
4266 if (get_last_insn () == to)
4267 set_last_insn (PREV_INSN (from));
4268 if (get_insns () == from)
4269 set_first_insn (NEXT_INSN (to));
4271 /* Make the new neighbors point to it and it to them. */
4272 if (NEXT_INSN (after))
4273 SET_PREV_INSN (NEXT_INSN (after)) = to;
4275 SET_NEXT_INSN (to) = NEXT_INSN (after);
4276 SET_PREV_INSN (from) = after;
4277 SET_NEXT_INSN (after) = from;
4278 if (after == get_last_insn ())
4279 set_last_insn (to);
4282 /* Same as function above, but take care to update BB boundaries. */
4283 void
4284 reorder_insns (rtx_insn *from, rtx_insn *to, rtx_insn *after)
4286 rtx_insn *prev = PREV_INSN (from);
4287 basic_block bb, bb2;
4289 reorder_insns_nobb (from, to, after);
4291 if (!BARRIER_P (after)
4292 && (bb = BLOCK_FOR_INSN (after)))
4294 rtx_insn *x;
4295 df_set_bb_dirty (bb);
4297 if (!BARRIER_P (from)
4298 && (bb2 = BLOCK_FOR_INSN (from)))
4300 if (BB_END (bb2) == to)
4301 BB_END (bb2) = prev;
4302 df_set_bb_dirty (bb2);
4305 if (BB_END (bb) == after)
4306 BB_END (bb) = to;
4308 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
4309 if (!BARRIER_P (x))
4310 df_insn_change_bb (x, bb);
4315 /* Emit insn(s) of given code and pattern
4316 at a specified place within the doubly-linked list.
4318 All of the emit_foo global entry points accept an object
4319 X which is either an insn list or a PATTERN of a single
4320 instruction.
4322 There are thus a few canonical ways to generate code and
4323 emit it at a specific place in the instruction stream. For
4324 example, consider the instruction named SPOT and the fact that
4325 we would like to emit some instructions before SPOT. We might
4326 do it like this:
4328 start_sequence ();
4329 ... emit the new instructions ...
4330 insns_head = get_insns ();
4331 end_sequence ();
4333 emit_insn_before (insns_head, SPOT);
4335 It used to be common to generate SEQUENCE rtl instead, but that
4336 is a relic of the past which no longer occurs. The reason is that
4337 SEQUENCE rtl results in much fragmented RTL memory since the SEQUENCE
4338 generated would almost certainly die right after it was created. */
4340 static rtx_insn *
4341 emit_pattern_before_noloc (rtx x, rtx before, rtx last, basic_block bb,
4342 rtx_insn *(*make_raw) (rtx))
4344 rtx_insn *insn;
4346 gcc_assert (before);
4348 if (x == NULL_RTX)
4349 return safe_as_a <rtx_insn *> (last);
4351 switch (GET_CODE (x))
4353 case DEBUG_INSN:
4354 case INSN:
4355 case JUMP_INSN:
4356 case CALL_INSN:
4357 case CODE_LABEL:
4358 case BARRIER:
4359 case NOTE:
4360 insn = as_a <rtx_insn *> (x);
4361 while (insn)
4363 rtx_insn *next = NEXT_INSN (insn);
4364 add_insn_before (insn, before, bb);
4365 last = insn;
4366 insn = next;
4368 break;
4370 #ifdef ENABLE_RTL_CHECKING
4371 case SEQUENCE:
4372 gcc_unreachable ();
4373 break;
4374 #endif
4376 default:
4377 last = (*make_raw) (x);
4378 add_insn_before (last, before, bb);
4379 break;
4382 return safe_as_a <rtx_insn *> (last);
4385 /* Make X be output before the instruction BEFORE. */
4387 rtx_insn *
4388 emit_insn_before_noloc (rtx x, rtx_insn *before, basic_block bb)
4390 return emit_pattern_before_noloc (x, before, before, bb, make_insn_raw);
4393 /* Make an instruction with body X and code JUMP_INSN
4394 and output it before the instruction BEFORE. */
4396 rtx_jump_insn *
4397 emit_jump_insn_before_noloc (rtx x, rtx_insn *before)
4399 return as_a <rtx_jump_insn *> (
4400 emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4401 make_jump_insn_raw));
4404 /* Make an instruction with body X and code CALL_INSN
4405 and output it before the instruction BEFORE. */
4407 rtx_insn *
4408 emit_call_insn_before_noloc (rtx x, rtx_insn *before)
4410 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4411 make_call_insn_raw);
4414 /* Make an instruction with body X and code DEBUG_INSN
4415 and output it before the instruction BEFORE. */
4417 rtx_insn *
4418 emit_debug_insn_before_noloc (rtx x, rtx before)
4420 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4421 make_debug_insn_raw);
4424 /* Make an insn of code BARRIER
4425 and output it before the insn BEFORE. */
4427 rtx_barrier *
4428 emit_barrier_before (rtx before)
4430 rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
4432 INSN_UID (insn) = cur_insn_uid++;
4434 add_insn_before (insn, before, NULL);
4435 return insn;
4438 /* Emit the label LABEL before the insn BEFORE. */
4440 rtx_code_label *
4441 emit_label_before (rtx label, rtx_insn *before)
4443 gcc_checking_assert (INSN_UID (label) == 0);
4444 INSN_UID (label) = cur_insn_uid++;
4445 add_insn_before (label, before, NULL);
4446 return as_a <rtx_code_label *> (label);
4449 /* Helper for emit_insn_after; handles lists of instructions
4450 efficiently. */
4452 static rtx_insn *
4453 emit_insn_after_1 (rtx_insn *first, rtx uncast_after, basic_block bb)
4455 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4456 rtx_insn *last;
4457 rtx_insn *after_after;
4458 if (!bb && !BARRIER_P (after))
4459 bb = BLOCK_FOR_INSN (after);
4461 if (bb)
4463 df_set_bb_dirty (bb);
4464 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4465 if (!BARRIER_P (last))
4467 set_block_for_insn (last, bb);
4468 df_insn_rescan (last);
4470 if (!BARRIER_P (last))
4472 set_block_for_insn (last, bb);
4473 df_insn_rescan (last);
4475 if (BB_END (bb) == after)
4476 BB_END (bb) = last;
4478 else
4479 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4480 continue;
4482 after_after = NEXT_INSN (after);
4484 SET_NEXT_INSN (after) = first;
4485 SET_PREV_INSN (first) = after;
4486 SET_NEXT_INSN (last) = after_after;
4487 if (after_after)
4488 SET_PREV_INSN (after_after) = last;
4490 if (after == get_last_insn ())
4491 set_last_insn (last);
4493 return last;
4496 static rtx_insn *
4497 emit_pattern_after_noloc (rtx x, rtx uncast_after, basic_block bb,
4498 rtx_insn *(*make_raw)(rtx))
4500 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4501 rtx_insn *last = after;
4503 gcc_assert (after);
4505 if (x == NULL_RTX)
4506 return last;
4508 switch (GET_CODE (x))
4510 case DEBUG_INSN:
4511 case INSN:
4512 case JUMP_INSN:
4513 case CALL_INSN:
4514 case CODE_LABEL:
4515 case BARRIER:
4516 case NOTE:
4517 last = emit_insn_after_1 (as_a <rtx_insn *> (x), after, bb);
4518 break;
4520 #ifdef ENABLE_RTL_CHECKING
4521 case SEQUENCE:
4522 gcc_unreachable ();
4523 break;
4524 #endif
4526 default:
4527 last = (*make_raw) (x);
4528 add_insn_after (last, after, bb);
4529 break;
4532 return last;
4535 /* Make X be output after the insn AFTER and set the BB of insn. If
4536 BB is NULL, an attempt is made to infer the BB from AFTER. */
4538 rtx_insn *
4539 emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
4541 return emit_pattern_after_noloc (x, after, bb, make_insn_raw);
4545 /* Make an insn of code JUMP_INSN with body X
4546 and output it after the insn AFTER. */
4548 rtx_jump_insn *
4549 emit_jump_insn_after_noloc (rtx x, rtx after)
4551 return as_a <rtx_jump_insn *> (
4552 emit_pattern_after_noloc (x, after, NULL, make_jump_insn_raw));
4555 /* Make an instruction with body X and code CALL_INSN
4556 and output it after the instruction AFTER. */
4558 rtx_insn *
4559 emit_call_insn_after_noloc (rtx x, rtx after)
4561 return emit_pattern_after_noloc (x, after, NULL, make_call_insn_raw);
4564 /* Make an instruction with body X and code DEBUG_INSN
4565 and output it after the instruction AFTER. */
4567 rtx_insn *
4568 emit_debug_insn_after_noloc (rtx x, rtx after)
4570 return emit_pattern_after_noloc (x, after, NULL, make_debug_insn_raw);
4573 /* Make an insn of code BARRIER
4574 and output it after the insn AFTER. */
4576 rtx_barrier *
4577 emit_barrier_after (rtx after)
4579 rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
4581 INSN_UID (insn) = cur_insn_uid++;
4583 add_insn_after (insn, after, NULL);
4584 return insn;
4587 /* Emit the label LABEL after the insn AFTER. */
4589 rtx_insn *
4590 emit_label_after (rtx label, rtx_insn *after)
4592 gcc_checking_assert (INSN_UID (label) == 0);
4593 INSN_UID (label) = cur_insn_uid++;
4594 add_insn_after (label, after, NULL);
4595 return as_a <rtx_insn *> (label);
4598 /* Notes require a bit of special handling: Some notes need to have their
4599 BLOCK_FOR_INSN set, others should never have it set, and some should
4600 have it set or clear depending on the context. */
4602 /* Return true iff a note of kind SUBTYPE should be emitted with routines
4603 that never set BLOCK_FOR_INSN on NOTE. ON_BB_BOUNDARY_P is true if
4604 the caller is asked to emit a note before BB_HEAD, or after BB_END. */
4606 static bool
4607 note_outside_basic_block_p (enum insn_note subtype, bool on_bb_boundary_p)
4609 switch (subtype)
4611 /* NOTE_INSN_SWITCH_TEXT_SECTIONS only appears between basic blocks. */
4612 case NOTE_INSN_SWITCH_TEXT_SECTIONS:
4613 return true;
4615 /* Notes for var tracking and EH region markers can appear between or
4616 inside basic blocks. If the caller is emitting on the basic block
4617 boundary, do not set BLOCK_FOR_INSN on the new note. */
4618 case NOTE_INSN_VAR_LOCATION:
4619 case NOTE_INSN_CALL_ARG_LOCATION:
4620 case NOTE_INSN_EH_REGION_BEG:
4621 case NOTE_INSN_EH_REGION_END:
4622 return on_bb_boundary_p;
4624 /* Otherwise, BLOCK_FOR_INSN must be set. */
4625 default:
4626 return false;
4630 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
4632 rtx_note *
4633 emit_note_after (enum insn_note subtype, rtx_insn *after)
4635 rtx_note *note = make_note_raw (subtype);
4636 basic_block bb = BARRIER_P (after) ? NULL : BLOCK_FOR_INSN (after);
4637 bool on_bb_boundary_p = (bb != NULL && BB_END (bb) == after);
4639 if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4640 add_insn_after_nobb (note, after);
4641 else
4642 add_insn_after (note, after, bb);
4643 return note;
4646 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4648 rtx_note *
4649 emit_note_before (enum insn_note subtype, rtx_insn *before)
4651 rtx_note *note = make_note_raw (subtype);
4652 basic_block bb = BARRIER_P (before) ? NULL : BLOCK_FOR_INSN (before);
4653 bool on_bb_boundary_p = (bb != NULL && BB_HEAD (bb) == before);
4655 if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4656 add_insn_before_nobb (note, before);
4657 else
4658 add_insn_before (note, before, bb);
4659 return note;
4662 /* Insert PATTERN after AFTER, setting its INSN_LOCATION to LOC.
4663 MAKE_RAW indicates how to turn PATTERN into a real insn. */
4665 static rtx_insn *
4666 emit_pattern_after_setloc (rtx pattern, rtx uncast_after, int loc,
4667 rtx_insn *(*make_raw) (rtx))
4669 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4670 rtx_insn *last = emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4672 if (pattern == NULL_RTX || !loc)
4673 return last;
4675 after = NEXT_INSN (after);
4676 while (1)
4678 if (active_insn_p (after)
4679 && !JUMP_TABLE_DATA_P (after) /* FIXME */
4680 && !INSN_LOCATION (after))
4681 INSN_LOCATION (after) = loc;
4682 if (after == last)
4683 break;
4684 after = NEXT_INSN (after);
4686 return last;
4689 /* Insert PATTERN after AFTER. MAKE_RAW indicates how to turn PATTERN
4690 into a real insn. SKIP_DEBUG_INSNS indicates whether to skip over
4691 DEBUG_INSNs when deriving the INSN_LOCATION to use. */
4693 static rtx_insn *
4694 emit_pattern_after (rtx pattern, rtx uncast_after, bool skip_debug_insns,
4695 rtx_insn *(*make_raw) (rtx))
4697 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4698 rtx_insn *prev = after;
4700 if (skip_debug_insns)
4701 while (DEBUG_INSN_P (prev))
4702 prev = PREV_INSN (prev);
4704 if (INSN_P (prev))
4705 return emit_pattern_after_setloc (pattern, after, INSN_LOCATION (prev),
4706 make_raw);
4707 else
4708 return emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4711 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4712 rtx_insn *
4713 emit_insn_after_setloc (rtx pattern, rtx after, int loc)
4715 return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw);
4718 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4719 rtx_insn *
4720 emit_insn_after (rtx pattern, rtx after)
4722 return emit_pattern_after (pattern, after, true, make_insn_raw);
4725 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4726 rtx_jump_insn *
4727 emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
4729 return as_a <rtx_jump_insn *> (
4730 emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw));
4733 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4734 rtx_jump_insn *
4735 emit_jump_insn_after (rtx pattern, rtx after)
4737 return as_a <rtx_jump_insn *> (
4738 emit_pattern_after (pattern, after, true, make_jump_insn_raw));
4741 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4742 rtx_insn *
4743 emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
4745 return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw);
4748 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4749 rtx_insn *
4750 emit_call_insn_after (rtx pattern, rtx after)
4752 return emit_pattern_after (pattern, after, true, make_call_insn_raw);
4755 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4756 rtx_insn *
4757 emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc)
4759 return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw);
4762 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4763 rtx_insn *
4764 emit_debug_insn_after (rtx pattern, rtx after)
4766 return emit_pattern_after (pattern, after, false, make_debug_insn_raw);
4769 /* Insert PATTERN before BEFORE, setting its INSN_LOCATION to LOC.
4770 MAKE_RAW indicates how to turn PATTERN into a real insn. INSNP
4771 indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN,
4772 CALL_INSN, etc. */
4774 static rtx_insn *
4775 emit_pattern_before_setloc (rtx pattern, rtx uncast_before, int loc, bool insnp,
4776 rtx_insn *(*make_raw) (rtx))
4778 rtx_insn *before = as_a <rtx_insn *> (uncast_before);
4779 rtx_insn *first = PREV_INSN (before);
4780 rtx_insn *last = emit_pattern_before_noloc (pattern, before,
4781 insnp ? before : NULL_RTX,
4782 NULL, make_raw);
4784 if (pattern == NULL_RTX || !loc)
4785 return last;
4787 if (!first)
4788 first = get_insns ();
4789 else
4790 first = NEXT_INSN (first);
4791 while (1)
4793 if (active_insn_p (first)
4794 && !JUMP_TABLE_DATA_P (first) /* FIXME */
4795 && !INSN_LOCATION (first))
4796 INSN_LOCATION (first) = loc;
4797 if (first == last)
4798 break;
4799 first = NEXT_INSN (first);
4801 return last;
4804 /* Insert PATTERN before BEFORE. MAKE_RAW indicates how to turn PATTERN
4805 into a real insn. SKIP_DEBUG_INSNS indicates whether to skip over
4806 DEBUG_INSNs when deriving the INSN_LOCATION to use. INSNP indicates if
4807 PATTERN is meant for an INSN as opposed to a JUMP_INSN, CALL_INSN, etc. */
4809 static rtx_insn *
4810 emit_pattern_before (rtx pattern, rtx uncast_before, bool skip_debug_insns,
4811 bool insnp, rtx_insn *(*make_raw) (rtx))
4813 rtx_insn *before = safe_as_a <rtx_insn *> (uncast_before);
4814 rtx_insn *next = before;
4816 if (skip_debug_insns)
4817 while (DEBUG_INSN_P (next))
4818 next = PREV_INSN (next);
4820 if (INSN_P (next))
4821 return emit_pattern_before_setloc (pattern, before, INSN_LOCATION (next),
4822 insnp, make_raw);
4823 else
4824 return emit_pattern_before_noloc (pattern, before,
4825 insnp ? before : NULL_RTX,
4826 NULL, make_raw);
4829 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4830 rtx_insn *
4831 emit_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
4833 return emit_pattern_before_setloc (pattern, before, loc, true,
4834 make_insn_raw);
4837 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
4838 rtx_insn *
4839 emit_insn_before (rtx pattern, rtx before)
4841 return emit_pattern_before (pattern, before, true, true, make_insn_raw);
4844 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4845 rtx_jump_insn *
4846 emit_jump_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
4848 return as_a <rtx_jump_insn *> (
4849 emit_pattern_before_setloc (pattern, before, loc, false,
4850 make_jump_insn_raw));
4853 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
4854 rtx_jump_insn *
4855 emit_jump_insn_before (rtx pattern, rtx before)
4857 return as_a <rtx_jump_insn *> (
4858 emit_pattern_before (pattern, before, true, false,
4859 make_jump_insn_raw));
4862 /* Like emit_call_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4863 rtx_insn *
4864 emit_call_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
4866 return emit_pattern_before_setloc (pattern, before, loc, false,
4867 make_call_insn_raw);
4870 /* Like emit_call_insn_before_noloc,
4871 but set INSN_LOCATION according to BEFORE. */
4872 rtx_insn *
4873 emit_call_insn_before (rtx pattern, rtx_insn *before)
4875 return emit_pattern_before (pattern, before, true, false,
4876 make_call_insn_raw);
4879 /* Like emit_debug_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4880 rtx_insn *
4881 emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc)
4883 return emit_pattern_before_setloc (pattern, before, loc, false,
4884 make_debug_insn_raw);
4887 /* Like emit_debug_insn_before_noloc,
4888 but set INSN_LOCATION according to BEFORE. */
4889 rtx_insn *
4890 emit_debug_insn_before (rtx pattern, rtx_insn *before)
4892 return emit_pattern_before (pattern, before, false, false,
4893 make_debug_insn_raw);
4896 /* Take X and emit it at the end of the doubly-linked
4897 INSN list.
4899 Returns the last insn emitted. */
4901 rtx_insn *
4902 emit_insn (rtx x)
4904 rtx_insn *last = get_last_insn ();
4905 rtx_insn *insn;
4907 if (x == NULL_RTX)
4908 return last;
4910 switch (GET_CODE (x))
4912 case DEBUG_INSN:
4913 case INSN:
4914 case JUMP_INSN:
4915 case CALL_INSN:
4916 case CODE_LABEL:
4917 case BARRIER:
4918 case NOTE:
4919 insn = as_a <rtx_insn *> (x);
4920 while (insn)
4922 rtx_insn *next = NEXT_INSN (insn);
4923 add_insn (insn);
4924 last = insn;
4925 insn = next;
4927 break;
4929 #ifdef ENABLE_RTL_CHECKING
4930 case JUMP_TABLE_DATA:
4931 case SEQUENCE:
4932 gcc_unreachable ();
4933 break;
4934 #endif
4936 default:
4937 last = make_insn_raw (x);
4938 add_insn (last);
4939 break;
4942 return last;
4945 /* Make an insn of code DEBUG_INSN with pattern X
4946 and add it to the end of the doubly-linked list. */
4948 rtx_insn *
4949 emit_debug_insn (rtx x)
4951 rtx_insn *last = get_last_insn ();
4952 rtx_insn *insn;
4954 if (x == NULL_RTX)
4955 return last;
4957 switch (GET_CODE (x))
4959 case DEBUG_INSN:
4960 case INSN:
4961 case JUMP_INSN:
4962 case CALL_INSN:
4963 case CODE_LABEL:
4964 case BARRIER:
4965 case NOTE:
4966 insn = as_a <rtx_insn *> (x);
4967 while (insn)
4969 rtx_insn *next = NEXT_INSN (insn);
4970 add_insn (insn);
4971 last = insn;
4972 insn = next;
4974 break;
4976 #ifdef ENABLE_RTL_CHECKING
4977 case JUMP_TABLE_DATA:
4978 case SEQUENCE:
4979 gcc_unreachable ();
4980 break;
4981 #endif
4983 default:
4984 last = make_debug_insn_raw (x);
4985 add_insn (last);
4986 break;
4989 return last;
4992 /* Make an insn of code JUMP_INSN with pattern X
4993 and add it to the end of the doubly-linked list. */
4995 rtx_insn *
4996 emit_jump_insn (rtx x)
4998 rtx_insn *last = NULL;
4999 rtx_insn *insn;
5001 switch (GET_CODE (x))
5003 case DEBUG_INSN:
5004 case INSN:
5005 case JUMP_INSN:
5006 case CALL_INSN:
5007 case CODE_LABEL:
5008 case BARRIER:
5009 case NOTE:
5010 insn = as_a <rtx_insn *> (x);
5011 while (insn)
5013 rtx_insn *next = NEXT_INSN (insn);
5014 add_insn (insn);
5015 last = insn;
5016 insn = next;
5018 break;
5020 #ifdef ENABLE_RTL_CHECKING
5021 case JUMP_TABLE_DATA:
5022 case SEQUENCE:
5023 gcc_unreachable ();
5024 break;
5025 #endif
5027 default:
5028 last = make_jump_insn_raw (x);
5029 add_insn (last);
5030 break;
5033 return last;
5036 /* Make an insn of code CALL_INSN with pattern X
5037 and add it to the end of the doubly-linked list. */
5039 rtx_insn *
5040 emit_call_insn (rtx x)
5042 rtx_insn *insn;
5044 switch (GET_CODE (x))
5046 case DEBUG_INSN:
5047 case INSN:
5048 case JUMP_INSN:
5049 case CALL_INSN:
5050 case CODE_LABEL:
5051 case BARRIER:
5052 case NOTE:
5053 insn = emit_insn (x);
5054 break;
5056 #ifdef ENABLE_RTL_CHECKING
5057 case SEQUENCE:
5058 case JUMP_TABLE_DATA:
5059 gcc_unreachable ();
5060 break;
5061 #endif
5063 default:
5064 insn = make_call_insn_raw (x);
5065 add_insn (insn);
5066 break;
5069 return insn;
5072 /* Add the label LABEL to the end of the doubly-linked list. */
5074 rtx_code_label *
5075 emit_label (rtx uncast_label)
5077 rtx_code_label *label = as_a <rtx_code_label *> (uncast_label);
5079 gcc_checking_assert (INSN_UID (label) == 0);
5080 INSN_UID (label) = cur_insn_uid++;
5081 add_insn (label);
5082 return label;
5085 /* Make an insn of code JUMP_TABLE_DATA
5086 and add it to the end of the doubly-linked list. */
5088 rtx_jump_table_data *
5089 emit_jump_table_data (rtx table)
5091 rtx_jump_table_data *jump_table_data =
5092 as_a <rtx_jump_table_data *> (rtx_alloc (JUMP_TABLE_DATA));
5093 INSN_UID (jump_table_data) = cur_insn_uid++;
5094 PATTERN (jump_table_data) = table;
5095 BLOCK_FOR_INSN (jump_table_data) = NULL;
5096 add_insn (jump_table_data);
5097 return jump_table_data;
5100 /* Make an insn of code BARRIER
5101 and add it to the end of the doubly-linked list. */
5103 rtx_barrier *
5104 emit_barrier (void)
5106 rtx_barrier *barrier = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
5107 INSN_UID (barrier) = cur_insn_uid++;
5108 add_insn (barrier);
5109 return barrier;
5112 /* Emit a copy of note ORIG. */
5114 rtx_note *
5115 emit_note_copy (rtx_note *orig)
5117 enum insn_note kind = (enum insn_note) NOTE_KIND (orig);
5118 rtx_note *note = make_note_raw (kind);
5119 NOTE_DATA (note) = NOTE_DATA (orig);
5120 add_insn (note);
5121 return note;
5124 /* Make an insn of code NOTE with kind KIND
5125 and add it to the end of the doubly-linked list. */
5127 rtx_note *
5128 emit_note (enum insn_note kind)
5130 rtx_note *note = make_note_raw (kind);
5131 add_insn (note);
5132 return note;
5135 /* Emit a clobber of lvalue X. */
5137 rtx_insn *
5138 emit_clobber (rtx x)
5140 /* CONCATs should not appear in the insn stream. */
5141 if (GET_CODE (x) == CONCAT)
5143 emit_clobber (XEXP (x, 0));
5144 return emit_clobber (XEXP (x, 1));
5146 return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
5149 /* Return a sequence of insns to clobber lvalue X. */
5151 rtx_insn *
5152 gen_clobber (rtx x)
5154 rtx_insn *seq;
5156 start_sequence ();
5157 emit_clobber (x);
5158 seq = get_insns ();
5159 end_sequence ();
5160 return seq;
5163 /* Emit a use of rvalue X. */
5165 rtx_insn *
5166 emit_use (rtx x)
5168 /* CONCATs should not appear in the insn stream. */
5169 if (GET_CODE (x) == CONCAT)
5171 emit_use (XEXP (x, 0));
5172 return emit_use (XEXP (x, 1));
5174 return emit_insn (gen_rtx_USE (VOIDmode, x));
5177 /* Return a sequence of insns to use rvalue X. */
5179 rtx_insn *
5180 gen_use (rtx x)
5182 rtx_insn *seq;
5184 start_sequence ();
5185 emit_use (x);
5186 seq = get_insns ();
5187 end_sequence ();
5188 return seq;
5191 /* Notes like REG_EQUAL and REG_EQUIV refer to a set in an instruction.
5192 Return the set in INSN that such notes describe, or NULL if the notes
5193 have no meaning for INSN. */
5196 set_for_reg_notes (rtx insn)
5198 rtx pat, reg;
5200 if (!INSN_P (insn))
5201 return NULL_RTX;
5203 pat = PATTERN (insn);
5204 if (GET_CODE (pat) == PARALLEL)
5206 /* We do not use single_set because that ignores SETs of unused
5207 registers. REG_EQUAL and REG_EQUIV notes really do require the
5208 PARALLEL to have a single SET. */
5209 if (multiple_sets (insn))
5210 return NULL_RTX;
5211 pat = XVECEXP (pat, 0, 0);
5214 if (GET_CODE (pat) != SET)
5215 return NULL_RTX;
5217 reg = SET_DEST (pat);
5219 /* Notes apply to the contents of a STRICT_LOW_PART. */
5220 if (GET_CODE (reg) == STRICT_LOW_PART
5221 || GET_CODE (reg) == ZERO_EXTRACT)
5222 reg = XEXP (reg, 0);
5224 /* Check that we have a register. */
5225 if (!(REG_P (reg) || GET_CODE (reg) == SUBREG))
5226 return NULL_RTX;
5228 return pat;
5231 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
5232 note of this type already exists, its datum is replaced with DATUM. */
5235 set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
5237 rtx note = find_reg_note (insn, kind, NULL_RTX);
5239 switch (kind)
5241 case REG_EQUAL:
5242 case REG_EQUIV:
5243 /* We need to support the REG_EQUAL on USE trick of find_reloads. */
5244 if (!set_for_reg_notes (insn) && GET_CODE (PATTERN (insn)) != USE)
5245 return NULL_RTX;
5247 /* Don't add REG_EQUAL/REG_EQUIV notes for ASM_OPERANDS.
5248 They serve no useful purpose and break eliminate_regs. */
5249 if (GET_CODE (datum) == ASM_OPERANDS)
5250 return NULL_RTX;
5252 /* Notes with side effects are dangerous. Even if the side-effect
5253 initially mirrors one in PATTERN (INSN), later optimizations
5254 might alter the way that the final register value is calculated
5255 and so move or alter the side-effect in some way. The note would
5256 then no longer be a valid substitution for SET_SRC. */
5257 if (side_effects_p (datum))
5258 return NULL_RTX;
5259 break;
5261 default:
5262 break;
5265 if (note)
5266 XEXP (note, 0) = datum;
5267 else
5269 add_reg_note (insn, kind, datum);
5270 note = REG_NOTES (insn);
5273 switch (kind)
5275 case REG_EQUAL:
5276 case REG_EQUIV:
5277 df_notes_rescan (as_a <rtx_insn *> (insn));
5278 break;
5279 default:
5280 break;
5283 return note;
5286 /* Like set_unique_reg_note, but don't do anything unless INSN sets DST. */
5288 set_dst_reg_note (rtx insn, enum reg_note kind, rtx datum, rtx dst)
5290 rtx set = set_for_reg_notes (insn);
5292 if (set && SET_DEST (set) == dst)
5293 return set_unique_reg_note (insn, kind, datum);
5294 return NULL_RTX;
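/* Example: a rough sketch of using the helpers above from a pass.  INSN
   and DEST are placeholders for the caller's insn and destination
   register; the call records that DEST holds the constant 42 after INSN,
   and yields NULL_RTX when INSN has no suitable single SET of DEST:

       rtx note = set_dst_reg_note (insn, REG_EQUAL, GEN_INT (42), dest);
       if (note == NULL_RTX)
         return;  */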
5297 /* Emit the rtl pattern X as an appropriate kind of insn. Also emit a
5298 following barrier if the instruction needs one and if ALLOW_BARRIER_P
5299 is true.
5301 If X is a label, it is simply added into the insn chain. */
5303 rtx_insn *
5304 emit (rtx x, bool allow_barrier_p)
5306 enum rtx_code code = classify_insn (x);
5308 switch (code)
5310 case CODE_LABEL:
5311 return emit_label (x);
5312 case INSN:
5313 return emit_insn (x);
5314 case JUMP_INSN:
5316 rtx_insn *insn = emit_jump_insn (x);
5317 if (allow_barrier_p
5318 && (any_uncondjump_p (insn) || GET_CODE (x) == RETURN))
5319 return emit_barrier ();
5320 return insn;
5322 case CALL_INSN:
5323 return emit_call_insn (x);
5324 case DEBUG_INSN:
5325 return emit_debug_insn (x);
5326 default:
5327 gcc_unreachable ();
5331 /* Space for free sequence stack entries. */
5332 static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
5334 /* Begin emitting insns to a sequence. If this sequence will contain
5335 something that might cause the compiler to pop arguments to function
5336 calls (because those pops have previously been deferred; see
5337 INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
5338 before calling this function. That will ensure that the deferred
5339 pops are not accidentally emitted in the middle of this sequence. */
5341 void
5342 start_sequence (void)
5344 struct sequence_stack *tem;
5346 if (free_sequence_stack != NULL)
5348 tem = free_sequence_stack;
5349 free_sequence_stack = tem->next;
5351 else
5352 tem = ggc_alloc<sequence_stack> ();
5354 tem->next = get_current_sequence ()->next;
5355 tem->first = get_insns ();
5356 tem->last = get_last_insn ();
5357 get_current_sequence ()->next = tem;
5359 set_first_insn (0);
5360 set_last_insn (0);
5363 /* Set up the insn chain starting with FIRST as the current sequence,
5364 saving the previously current one. See the documentation for
5365 start_sequence for more information about how to use this function. */
5367 void
5368 push_to_sequence (rtx_insn *first)
5370 rtx_insn *last;
5372 start_sequence ();
5374 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last))
5377 set_first_insn (first);
5378 set_last_insn (last);
5381 /* Like push_to_sequence, but take the last insn as an argument to avoid
5382 looping through the list. */
5384 void
5385 push_to_sequence2 (rtx_insn *first, rtx_insn *last)
5387 start_sequence ();
5389 set_first_insn (first);
5390 set_last_insn (last);
5393 /* Set up the outer-level insn chain
5394 as the current sequence, saving the previously current one. */
5396 void
5397 push_topmost_sequence (void)
5399 struct sequence_stack *top;
5401 start_sequence ();
5403 top = get_topmost_sequence ();
5404 set_first_insn (top->first);
5405 set_last_insn (top->last);
5408 /* After emitting to the outer-level insn chain, update the outer-level
5409 insn chain, and restore the previous saved state. */
5411 void
5412 pop_topmost_sequence (void)
5414 struct sequence_stack *top;
5416 top = get_topmost_sequence ();
5417 top->first = get_insns ();
5418 top->last = get_last_insn ();
5420 end_sequence ();
5423 /* After emitting to a sequence, restore previous saved state.
5425 To get the contents of the sequence just made, you must call
5426 `get_insns' *before* calling here.
5428 If the compiler might have deferred popping arguments while
5429 generating this sequence, and this sequence will not be immediately
5430 inserted into the instruction stream, use do_pending_stack_adjust
5431 before calling get_insns. That will ensure that the deferred
5432 pops are inserted into this sequence, and not into some random
5433 location in the instruction stream. See INHIBIT_DEFER_POP for more
5434 information about deferred popping of arguments. */
5436 void
5437 end_sequence (void)
5439 struct sequence_stack *tem = get_current_sequence ()->next;
5441 set_first_insn (tem->first);
5442 set_last_insn (tem->last);
5443 get_current_sequence ()->next = tem->next;
5445 memset (tem, 0, sizeof (*tem));
5446 tem->next = free_sequence_stack;
5447 free_sequence_stack = tem;
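/* Example: the canonical sequence idiom, shown here as a rough sketch.
   TARGET and VALUE are hypothetical operands and SPOT an existing insn
   before which the new code should appear:

       start_sequence ();
       emit_insn (gen_rtx_SET (target, value));
       rtx_insn *seq = get_insns ();
       end_sequence ();
       emit_insn_before (seq, spot);  */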
5450 /* Return 1 if currently emitting into a sequence. */
5453 in_sequence_p (void)
5455 return get_current_sequence ()->next != 0;
5458 /* Put the various virtual registers into REGNO_REG_RTX. */
5460 static void
5461 init_virtual_regs (void)
5463 regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
5464 regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
5465 regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
5466 regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
5467 regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
5468 regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
5469 = virtual_preferred_stack_boundary_rtx;
5473 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
5474 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
5475 static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
5476 static int copy_insn_n_scratches;
5478 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5479 copied an ASM_OPERANDS.
5480 In that case, it is the original input-operand vector. */
5481 static rtvec orig_asm_operands_vector;
5483 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5484 copied an ASM_OPERANDS.
5485 In that case, it is the copied input-operand vector. */
5486 static rtvec copy_asm_operands_vector;
5488 /* Likewise for the constraints vector. */
5489 static rtvec orig_asm_constraints_vector;
5490 static rtvec copy_asm_constraints_vector;
5492 /* Recursively create a new copy of an rtx for copy_insn.
5493 This function differs from copy_rtx in that it handles SCRATCHes and
5494 ASM_OPERANDs properly.
5495 Normally, this function is not used directly; use copy_insn as front end.
5496 However, you could first copy an insn pattern with copy_insn and then use
5497 this function afterwards to properly copy any REG_NOTEs containing
5498 SCRATCHes. */
5501 copy_insn_1 (rtx orig)
5503 rtx copy;
5504 int i, j;
5505 RTX_CODE code;
5506 const char *format_ptr;
5508 if (orig == NULL)
5509 return NULL;
5511 code = GET_CODE (orig);
5513 switch (code)
5515 case REG:
5516 case DEBUG_EXPR:
5517 CASE_CONST_ANY:
5518 case SYMBOL_REF:
5519 case CODE_LABEL:
5520 case PC:
5521 case CC0:
5522 case RETURN:
5523 case SIMPLE_RETURN:
5524 return orig;
5525 case CLOBBER:
5526 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
5527 clobbers or clobbers of hard registers that originated as pseudos.
5528 This is needed to allow safe register renaming. */
5529 if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER
5530 && ORIGINAL_REGNO (XEXP (orig, 0)) == REGNO (XEXP (orig, 0)))
5531 return orig;
5532 break;
5534 case SCRATCH:
5535 for (i = 0; i < copy_insn_n_scratches; i++)
5536 if (copy_insn_scratch_in[i] == orig)
5537 return copy_insn_scratch_out[i];
5538 break;
5540 case CONST:
5541 if (shared_const_p (orig))
5542 return orig;
5543 break;
5545 /* A MEM with a constant address is not sharable. The problem is that
5546 the constant address may need to be reloaded. If the mem is shared,
5547 then reloading one copy of this mem will cause all copies to appear
5548 to have been reloaded. */
5550 default:
5551 break;
5554 /* Copy the various flags, fields, and other information. We assume
5555 that all fields need copying, and then clear the fields that should
5556 not be copied. That is the sensible default behavior, and forces
5557 us to explicitly document why we are *not* copying a flag. */
5558 copy = shallow_copy_rtx (orig);
5560 /* We do not copy the USED flag, which is used as a mark bit during
5561 walks over the RTL. */
5562 RTX_FLAG (copy, used) = 0;
5564 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
5565 if (INSN_P (orig))
5567 RTX_FLAG (copy, jump) = 0;
5568 RTX_FLAG (copy, call) = 0;
5569 RTX_FLAG (copy, frame_related) = 0;
5572 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
5574 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
5575 switch (*format_ptr++)
5577 case 'e':
5578 if (XEXP (orig, i) != NULL)
5579 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
5580 break;
5582 case 'E':
5583 case 'V':
5584 if (XVEC (orig, i) == orig_asm_constraints_vector)
5585 XVEC (copy, i) = copy_asm_constraints_vector;
5586 else if (XVEC (orig, i) == orig_asm_operands_vector)
5587 XVEC (copy, i) = copy_asm_operands_vector;
5588 else if (XVEC (orig, i) != NULL)
5590 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
5591 for (j = 0; j < XVECLEN (copy, i); j++)
5592 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
5594 break;
5596 case 't':
5597 case 'w':
5598 case 'i':
5599 case 's':
5600 case 'S':
5601 case 'u':
5602 case '0':
5603 /* These are left unchanged. */
5604 break;
5606 default:
5607 gcc_unreachable ();
5610 if (code == SCRATCH)
5612 i = copy_insn_n_scratches++;
5613 gcc_assert (i < MAX_RECOG_OPERANDS);
5614 copy_insn_scratch_in[i] = orig;
5615 copy_insn_scratch_out[i] = copy;
5617 else if (code == ASM_OPERANDS)
5619 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
5620 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
5621 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
5622 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
5625 return copy;
5628 /* Create a new copy of an rtx.
5629 This function differs from copy_rtx in that it handles SCRATCHes and
5630 ASM_OPERANDs properly.
5631 INSN doesn't really have to be a full INSN; it could be just the
5632 pattern. */
5634 copy_insn (rtx insn)
5636 copy_insn_n_scratches = 0;
5637 orig_asm_operands_vector = 0;
5638 orig_asm_constraints_vector = 0;
5639 copy_asm_operands_vector = 0;
5640 copy_asm_constraints_vector = 0;
5641 return copy_insn_1 (insn);
5644 /* Return a copy of INSN that can be used in a SEQUENCE delay slot,
5645 on the assumption that INSN itself remains in its original place. */
5647 rtx_insn *
5648 copy_delay_slot_insn (rtx_insn *insn)
5650 /* Copy INSN with its rtx_code, all its notes, location etc. */
5651 insn = as_a <rtx_insn *> (copy_rtx (insn));
5652 INSN_UID (insn) = cur_insn_uid++;
5653 return insn;
5656 /* Initialize data structures and variables in this file
5657 before generating rtl for each function. */
5659 void
5660 init_emit (void)
5662 set_first_insn (NULL);
5663 set_last_insn (NULL);
5664 if (MIN_NONDEBUG_INSN_UID)
5665 cur_insn_uid = MIN_NONDEBUG_INSN_UID;
5666 else
5667 cur_insn_uid = 1;
5668 cur_debug_insn_uid = 1;
5669 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
5670 first_label_num = label_num;
5671 get_current_sequence ()->next = NULL;
5673 /* Init the tables that describe all the pseudo regs. */
5675 crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
5677 crtl->emit.regno_pointer_align
5678 = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);
5680 regno_reg_rtx = ggc_vec_alloc<rtx> (crtl->emit.regno_pointer_align_length);
5682 /* Put copies of all the hard registers into regno_reg_rtx. */
5683 memcpy (regno_reg_rtx,
5684 initial_regno_reg_rtx,
5685 FIRST_PSEUDO_REGISTER * sizeof (rtx));
5687 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
5688 init_virtual_regs ();
5690 /* Indicate that the virtual registers and stack locations are
5691 all pointers. */
5692 REG_POINTER (stack_pointer_rtx) = 1;
5693 REG_POINTER (frame_pointer_rtx) = 1;
5694 REG_POINTER (hard_frame_pointer_rtx) = 1;
5695 REG_POINTER (arg_pointer_rtx) = 1;
5697 REG_POINTER (virtual_incoming_args_rtx) = 1;
5698 REG_POINTER (virtual_stack_vars_rtx) = 1;
5699 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5700 REG_POINTER (virtual_outgoing_args_rtx) = 1;
5701 REG_POINTER (virtual_cfa_rtx) = 1;
5703 #ifdef STACK_BOUNDARY
5704 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5705 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5706 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5707 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5709 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5710 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5711 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5712 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5713 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
5714 #endif
5716 #ifdef INIT_EXPANDERS
5717 INIT_EXPANDERS;
5718 #endif
5721 /* Generate a vector constant for mode MODE and constant value CONSTANT. */
5723 static rtx
5724 gen_const_vector (machine_mode mode, int constant)
5726 rtx tem;
5727 rtvec v;
5728 int units, i;
5729 machine_mode inner;
5731 units = GET_MODE_NUNITS (mode);
5732 inner = GET_MODE_INNER (mode);
5734 gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));
5736 v = rtvec_alloc (units);
5738 /* We need to call this function after we set the scalar const_tiny_rtx
5739 entries. */
5740 gcc_assert (const_tiny_rtx[constant][(int) inner]);
5742 for (i = 0; i < units; ++i)
5743 RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];
5745 tem = gen_rtx_raw_CONST_VECTOR (mode, v);
5746 return tem;
5749 /* Generate a vector like gen_rtx_raw_CONST_VECTOR, but return the shared
5750 zero, one or minus-one vector when all elements have that value. */
5752 gen_rtx_CONST_VECTOR (machine_mode mode, rtvec v)
5754 machine_mode inner = GET_MODE_INNER (mode);
5755 int nunits = GET_MODE_NUNITS (mode);
5756 rtx x;
5757 int i;
5759 /* Check to see if all of the elements have the same value. */
5760 x = RTVEC_ELT (v, nunits - 1);
5761 for (i = nunits - 2; i >= 0; i--)
5762 if (RTVEC_ELT (v, i) != x)
5763 break;
5765 /* If the values are all the same, check to see if we can use one of the
5766 standard constant vectors. */
5767 if (i == -1)
5769 if (x == CONST0_RTX (inner))
5770 return CONST0_RTX (mode);
5771 else if (x == CONST1_RTX (inner))
5772 return CONST1_RTX (mode);
5773 else if (x == CONSTM1_RTX (inner))
5774 return CONSTM1_RTX (mode);
5777 return gen_rtx_raw_CONST_VECTOR (mode, v);
5780 /* Initialise global register information required by all functions. */
5782 void
5783 init_emit_regs (void)
5785 int i;
5786 machine_mode mode;
5787 mem_attrs *attrs;
5789 /* Reset register attributes */
5790 reg_attrs_htab->empty ();
5792 /* We need reg_raw_mode, so initialize the modes now. */
5793 init_reg_modes_target ();
5795 /* Assign register numbers to the globally defined register rtx. */
5796 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
5797 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
5798 hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
5799 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
5800 virtual_incoming_args_rtx =
5801 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
5802 virtual_stack_vars_rtx =
5803 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
5804 virtual_stack_dynamic_rtx =
5805 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
5806 virtual_outgoing_args_rtx =
5807 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
5808 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
5809 virtual_preferred_stack_boundary_rtx =
5810 gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);
5812 /* Initialize RTL for commonly used hard registers. These are
5813 copied into regno_reg_rtx as we begin to compile each function. */
5814 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5815 initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
5817 #ifdef RETURN_ADDRESS_POINTER_REGNUM
5818 return_address_pointer_rtx
5819 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
5820 #endif
5822 pic_offset_table_rtx = NULL_RTX;
5823 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
5824 pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
5826 for (i = 0; i < (int) MAX_MACHINE_MODE; i++)
5828 mode = (machine_mode) i;
5829 attrs = ggc_cleared_alloc<mem_attrs> ();
5830 attrs->align = BITS_PER_UNIT;
5831 attrs->addrspace = ADDR_SPACE_GENERIC;
5832 if (mode != BLKmode)
5834 attrs->size_known_p = true;
5835 attrs->size = GET_MODE_SIZE (mode);
5836 if (STRICT_ALIGNMENT)
5837 attrs->align = GET_MODE_ALIGNMENT (mode);
5839 mode_mem_attrs[i] = attrs;
5843 /* Initialize global machine_mode variables. */
5845 void
5846 init_derived_machine_modes (void)
5848 byte_mode = VOIDmode;
5849 word_mode = VOIDmode;
5851 for (machine_mode mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5852 mode != VOIDmode;
5853 mode = GET_MODE_WIDER_MODE (mode))
5855 if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
5856 && byte_mode == VOIDmode)
5857 byte_mode = mode;
5859 if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
5860 && word_mode == VOIDmode)
5861 word_mode = mode;
5864 ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
5867 /* Create some permanent unique rtl objects shared between all functions. */
5869 void
5870 init_emit_once (void)
5872 int i;
5873 machine_mode mode;
5874 machine_mode double_mode;
5876 /* Initialize the CONST_INT, CONST_WIDE_INT, CONST_DOUBLE,
5877 CONST_FIXED, and memory attribute hash tables. */
5878 const_int_htab = hash_table<const_int_hasher>::create_ggc (37);
5880 #if TARGET_SUPPORTS_WIDE_INT
5881 const_wide_int_htab = hash_table<const_wide_int_hasher>::create_ggc (37);
5882 #endif
5883 const_double_htab = hash_table<const_double_hasher>::create_ggc (37);
5885 const_fixed_htab = hash_table<const_fixed_hasher>::create_ggc (37);
5887 reg_attrs_htab = hash_table<reg_attr_hasher>::create_ggc (37);
5889 #ifdef INIT_EXPANDERS
5890 /* This is to initialize {init|mark|free}_machine_status before the first
5891 call to push_function_context_to. This is needed by the Chill front
5892 end which calls push_function_context_to before the first call to
5893 init_function_start. */
5894 INIT_EXPANDERS;
5895 #endif
5897 /* Create the unique rtx's for certain rtx codes and operand values. */
5899 /* Process stack-limiting command-line options. */
5900 if (opt_fstack_limit_symbol_arg != NULL)
5901 stack_limit_rtx
5902 = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (opt_fstack_limit_symbol_arg));
5903 if (opt_fstack_limit_register_no >= 0)
5904 stack_limit_rtx = gen_rtx_REG (Pmode, opt_fstack_limit_register_no);
5906 /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
5907 tries to use these variables. */
5908 for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
5909 const_int_rtx[i + MAX_SAVED_CONST_INT] =
5910 gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);
5912 if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
5913 && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
5914 const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
5915 else
5916 const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
5918 double_mode = mode_for_size (DOUBLE_TYPE_SIZE, MODE_FLOAT, 0);
5920 real_from_integer (&dconst0, double_mode, 0, SIGNED);
5921 real_from_integer (&dconst1, double_mode, 1, SIGNED);
5922 real_from_integer (&dconst2, double_mode, 2, SIGNED);
5924 dconstm1 = dconst1;
5925 dconstm1.sign = 1;
5927 dconsthalf = dconst1;
5928 SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);
5930 for (i = 0; i < 3; i++)
5932 const REAL_VALUE_TYPE *const r =
5933 (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
5935 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
5936 mode != VOIDmode;
5937 mode = GET_MODE_WIDER_MODE (mode))
5938 const_tiny_rtx[i][(int) mode] =
5939 const_double_from_real_value (*r, mode);
5941 for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT);
5942 mode != VOIDmode;
5943 mode = GET_MODE_WIDER_MODE (mode))
5944 const_tiny_rtx[i][(int) mode] =
5945 const_double_from_real_value (*r, mode);
5947 const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
5949 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5950 mode != VOIDmode;
5951 mode = GET_MODE_WIDER_MODE (mode))
5952 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5954 for (mode = MIN_MODE_PARTIAL_INT;
5955 mode <= MAX_MODE_PARTIAL_INT;
5956 mode = (machine_mode)((int)(mode) + 1))
5957 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5960 const_tiny_rtx[3][(int) VOIDmode] = constm1_rtx;
5962 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5963 mode != VOIDmode;
5964 mode = GET_MODE_WIDER_MODE (mode))
5965 const_tiny_rtx[3][(int) mode] = constm1_rtx;
5967 for (mode = MIN_MODE_PARTIAL_INT;
5968 mode <= MAX_MODE_PARTIAL_INT;
5969 mode = (machine_mode)((int)(mode) + 1))
5970 const_tiny_rtx[3][(int) mode] = constm1_rtx;
5972 for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_INT);
5973 mode != VOIDmode;
5974 mode = GET_MODE_WIDER_MODE (mode))
5976 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
5977 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
5980 for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_FLOAT);
5981 mode != VOIDmode;
5982 mode = GET_MODE_WIDER_MODE (mode))
5984 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
5985 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
5988 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
5989 mode != VOIDmode;
5990 mode = GET_MODE_WIDER_MODE (mode))
5992 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5993 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5994 const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
5997 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
5998 mode != VOIDmode;
5999 mode = GET_MODE_WIDER_MODE (mode))
6001 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6002 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6005 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FRACT);
6006 mode != VOIDmode;
6007 mode = GET_MODE_WIDER_MODE (mode))
6009 FCONST0 (mode).data.high = 0;
6010 FCONST0 (mode).data.low = 0;
6011 FCONST0 (mode).mode = mode;
6012 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6013 FCONST0 (mode), mode);
6016 for (mode = GET_CLASS_NARROWEST_MODE (MODE_UFRACT);
6017 mode != VOIDmode;
6018 mode = GET_MODE_WIDER_MODE (mode))
6020 FCONST0 (mode).data.high = 0;
6021 FCONST0 (mode).data.low = 0;
6022 FCONST0 (mode).mode = mode;
6023 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6024 FCONST0 (mode), mode);
6027 for (mode = GET_CLASS_NARROWEST_MODE (MODE_ACCUM);
6028 mode != VOIDmode;
6029 mode = GET_MODE_WIDER_MODE (mode))
6031 FCONST0 (mode).data.high = 0;
6032 FCONST0 (mode).data.low = 0;
6033 FCONST0 (mode).mode = mode;
6034 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6035 FCONST0 (mode), mode);
6037 /* We store the value 1. */
6038 FCONST1 (mode).data.high = 0;
6039 FCONST1 (mode).data.low = 0;
6040 FCONST1 (mode).mode = mode;
6041 FCONST1 (mode).data
6042 = double_int_one.lshift (GET_MODE_FBIT (mode),
6043 HOST_BITS_PER_DOUBLE_INT,
6044 SIGNED_FIXED_POINT_MODE_P (mode));
6045 const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6046 FCONST1 (mode), mode);
6049 for (mode = GET_CLASS_NARROWEST_MODE (MODE_UACCUM);
6050 mode != VOIDmode;
6051 mode = GET_MODE_WIDER_MODE (mode))
6053 FCONST0 (mode).data.high = 0;
6054 FCONST0 (mode).data.low = 0;
6055 FCONST0 (mode).mode = mode;
6056 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6057 FCONST0 (mode), mode);
6059 /* We store the value 1. */
6060 FCONST1 (mode).data.high = 0;
6061 FCONST1 (mode).data.low = 0;
6062 FCONST1 (mode).mode = mode;
6063 FCONST1 (mode).data
6064 = double_int_one.lshift (GET_MODE_FBIT (mode),
6065 HOST_BITS_PER_DOUBLE_INT,
6066 SIGNED_FIXED_POINT_MODE_P (mode));
6067 const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6068 FCONST1 (mode), mode);
6071 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FRACT);
6072 mode != VOIDmode;
6073 mode = GET_MODE_WIDER_MODE (mode))
6075 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6078 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UFRACT);
6079 mode != VOIDmode;
6080 mode = GET_MODE_WIDER_MODE (mode))
6082 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6085 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_ACCUM);
6086 mode != VOIDmode;
6087 mode = GET_MODE_WIDER_MODE (mode))
6089 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6090 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6093 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UACCUM);
6094 mode != VOIDmode;
6095 mode = GET_MODE_WIDER_MODE (mode))
6097 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6098 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6101 for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
6102 if (GET_MODE_CLASS ((machine_mode) i) == MODE_CC)
6103 const_tiny_rtx[0][i] = const0_rtx;
6105 const_tiny_rtx[0][(int) BImode] = const0_rtx;
6106 if (STORE_FLAG_VALUE == 1)
6107 const_tiny_rtx[1][(int) BImode] = const1_rtx;
6109 for (mode = GET_CLASS_NARROWEST_MODE (MODE_POINTER_BOUNDS);
6110 mode != VOIDmode;
6111 mode = GET_MODE_WIDER_MODE (mode))
6113 wide_int wi_zero = wi::zero (GET_MODE_PRECISION (mode));
6114 const_tiny_rtx[0][mode] = immed_wide_int_const (wi_zero, mode);
6117 pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
6118 ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
6119 simple_return_rtx = gen_rtx_fmt_ (SIMPLE_RETURN, VOIDmode);
6120 cc0_rtx = gen_rtx_fmt_ (CC0, VOIDmode);
6121 invalid_insn_rtx = gen_rtx_INSN (VOIDmode,
6122 /*prev_insn=*/NULL,
6123 /*next_insn=*/NULL,
6124 /*bb=*/NULL,
6125 /*pattern=*/NULL_RTX,
6126 /*location=*/-1,
6127 CODE_FOR_nothing,
6128 /*reg_notes=*/NULL_RTX);
6131 /* Produce exact duplicate of insn INSN after AFTER.
6132 Takes care of updating libcall regions if present. */
6134 rtx_insn *
6135 emit_copy_of_insn_after (rtx_insn *insn, rtx_insn *after)
6137 rtx_insn *new_rtx;
6138 rtx link;
6140 switch (GET_CODE (insn))
6142 case INSN:
6143 new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
6144 break;
6146 case JUMP_INSN:
6147 new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
6148 CROSSING_JUMP_P (new_rtx) = CROSSING_JUMP_P (insn);
6149 break;
6151 case DEBUG_INSN:
6152 new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
6153 break;
6155 case CALL_INSN:
6156 new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
6157 if (CALL_INSN_FUNCTION_USAGE (insn))
6158 CALL_INSN_FUNCTION_USAGE (new_rtx)
6159 = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
6160 SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
6161 RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
6162 RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
6163 RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
6164 = RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
6165 break;
6167 default:
6168 gcc_unreachable ();
6171 /* Update LABEL_NUSES. */
6172 mark_jump_label (PATTERN (new_rtx), new_rtx, 0);
6174 INSN_LOCATION (new_rtx) = INSN_LOCATION (insn);
6176 /* If the old insn is frame related, then so is the new one. This is
6177 primarily needed for IA-64 unwind info which marks epilogue insns,
6178 which may be duplicated by the basic block reordering code. */
6179 RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);
6181 /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
6182 will make them. REG_LABEL_TARGETs are created there too, but are
6183 supposed to be sticky, so we copy them. */
6184 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
6185 if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
6187 if (GET_CODE (link) == EXPR_LIST)
6188 add_reg_note (new_rtx, REG_NOTE_KIND (link),
6189 copy_insn_1 (XEXP (link, 0)));
6190 else
6191 add_shallow_copy_of_reg_note (new_rtx, link);
6194 INSN_CODE (new_rtx) = INSN_CODE (insn);
6195 return new_rtx;
6198 static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
6200 gen_hard_reg_clobber (machine_mode mode, unsigned int regno)
6202 if (hard_reg_clobbers[mode][regno])
6203 return hard_reg_clobbers[mode][regno];
6204 else
6205 return (hard_reg_clobbers[mode][regno] =
6206 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
6209 location_t prologue_location;
6210 location_t epilogue_location;
6212 /* Hold current and last location information, so the data structures
6213 are built lazily only when instructions at a given location are
6214 actually needed. */
6215 static location_t curr_location;
6217 /* Allocate insn location datastructure. */
6218 void
6219 insn_locations_init (void)
6221 prologue_location = epilogue_location = 0;
6222 curr_location = UNKNOWN_LOCATION;
6225 /* At the end of emit stage, clear current location. */
6226 void
6227 insn_locations_finalize (void)
6229 epilogue_location = curr_location;
6230 curr_location = UNKNOWN_LOCATION;
6233 /* Set current location. */
6234 void
6235 set_curr_insn_location (location_t location)
6237 curr_location = location;
6240 /* Get current location. */
6241 location_t
6242 curr_insn_location (void)
6244 return curr_location;
6247 /* Return lexical scope block insn belongs to. */
6248 tree
6249 insn_scope (const rtx_insn *insn)
6251 return LOCATION_BLOCK (INSN_LOCATION (insn));
6254 /* Return line number of the statement that produced this insn. */
6256 insn_line (const rtx_insn *insn)
6258 return LOCATION_LINE (INSN_LOCATION (insn));
6261 /* Return source file of the statement that produced this insn. */
6262 const char *
6263 insn_file (const rtx_insn *insn)
6265 return LOCATION_FILE (INSN_LOCATION (insn));
6268 /* Return expanded location of the statement that produced this insn. */
6269 expanded_location
6270 insn_location (const rtx_insn *insn)
6272 return expand_location (INSN_LOCATION (insn));
6275 /* Return true if memory model MODEL requires a pre-operation (release-style)
6276 barrier or a post-operation (acquire-style) barrier. While not universal,
6277 this function matches the behavior of several targets. */
6279 bool
6280 need_atomic_barrier_p (enum memmodel model, bool pre)
6282 switch (model & MEMMODEL_BASE_MASK)
6284 case MEMMODEL_RELAXED:
6285 case MEMMODEL_CONSUME:
6286 return false;
6287 case MEMMODEL_RELEASE:
6288 return pre;
6289 case MEMMODEL_ACQUIRE:
6290 return !pre;
6291 case MEMMODEL_ACQ_REL:
6292 case MEMMODEL_SEQ_CST:
6293 return true;
6294 default:
6295 gcc_unreachable ();
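/* Example: a rough sketch of how a target expander might consult the
   predicate above around an atomic store.  MEM and VAL are hypothetical
   operands; real backends typically emit their own barrier patterns
   rather than expand_mem_thread_fence:

       if (need_atomic_barrier_p (model, true))
         expand_mem_thread_fence (model);
       emit_move_insn (mem, val);
       if (need_atomic_barrier_p (model, false))
         expand_mem_thread_fence (model);  */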
6299 #include "gt-emit-rtl.h"