1 /* Emit RTL for the GCC expander.
2 Copyright (C) 1987-2015 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
21 /* Middle-to-low level generation of rtx code and insns.
23 This file contains support functions for creating rtl expressions
24 and manipulating them in the doubly-linked chain of insns.
26 The patterns of the insns are created by machine-dependent
27 routines in insn-emit.c, which is generated automatically from
28 the machine description. These routines make the individual rtx's
29 of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
30 which are automatically generated from rtl.def; what is machine
31 dependent is the kind of rtx's they make and what arguments they
32 use. */
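/* Illustrative sketch (not part of the original file; the register number and
   modes below are arbitrary examples): middle-end code normally builds such
   expressions through the gen_rtx_* wrappers rather than calling the
   gen_rtx_fmt_* routines directly, e.g.

       rtx reg  = gen_rtx_REG (Pmode, 3);
       rtx addr = gen_rtx_PLUS (Pmode, reg, GEN_INT (16));
       rtx mem  = gen_rtx_MEM (SImode, addr);

   which yields a (mem:SI (plus (reg 3) (const_int 16))) expression.  */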
34 #include "config.h"
35 #include "system.h"
36 #include "coretypes.h"
37 #include "tm.h"
38 #include "diagnostic-core.h"
39 #include "rtl.h"
40 #include "hash-set.h"
41 #include "machmode.h"
42 #include "vec.h"
43 #include "double-int.h"
44 #include "input.h"
45 #include "alias.h"
46 #include "symtab.h"
47 #include "wide-int.h"
48 #include "inchash.h"
49 #include "real.h"
50 #include "tree.h"
51 #include "fold-const.h"
52 #include "varasm.h"
53 #include "predict.h"
54 #include "hard-reg-set.h"
55 #include "input.h"
56 #include "function.h"
57 #include "cfgrtl.h"
58 #include "basic-block.h"
59 #include "tree-eh.h"
60 #include "tm_p.h"
61 #include "flags.h"
62 #include "stringpool.h"
63 #include "expr.h"
64 #include "regs.h"
65 #include "insn-config.h"
66 #include "recog.h"
67 #include "bitmap.h"
68 #include "debug.h"
69 #include "langhooks.h"
70 #include "df.h"
71 #include "params.h"
72 #include "target.h"
73 #include "builtins.h"
74 #include "rtl-iter.h"
76 struct target_rtl default_target_rtl;
77 #if SWITCHABLE_TARGET
78 struct target_rtl *this_target_rtl = &default_target_rtl;
79 #endif
81 #define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)
83 /* Commonly used modes. */
85 machine_mode byte_mode; /* Mode whose width is BITS_PER_UNIT. */
86 machine_mode word_mode; /* Mode whose width is BITS_PER_WORD. */
87 machine_mode double_mode; /* Mode whose width is DOUBLE_TYPE_SIZE. */
88 machine_mode ptr_mode; /* Mode whose width is POINTER_SIZE. */
90 /* Datastructures maintained for currently processed function in RTL form. */
92 struct rtl_data x_rtl;
94 /* Indexed by pseudo register number, gives the rtx for that pseudo.
95 Allocated in parallel with regno_pointer_align.
96 FIXME: We could put it into emit_status struct, but gengtype is not able to deal
 97 with a length attribute nested in top-level structures. */
99 rtx * regno_reg_rtx;
101 /* This is *not* reset after each function. It gives each CODE_LABEL
102 in the entire compilation a unique label number. */
104 static GTY(()) int label_num = 1;
106 /* We record floating-point CONST_DOUBLEs in each floating-point mode for
107 the values of 0, 1, and 2. For the integer entries and VOIDmode, we
108 record a copy of const[012]_rtx and constm1_rtx. CONSTM1_RTX
109 is set only for MODE_INT and MODE_VECTOR_INT modes. */
111 rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE];
113 rtx const_true_rtx;
115 REAL_VALUE_TYPE dconst0;
116 REAL_VALUE_TYPE dconst1;
117 REAL_VALUE_TYPE dconst2;
118 REAL_VALUE_TYPE dconstm1;
119 REAL_VALUE_TYPE dconsthalf;
121 /* Record fixed-point constant 0 and 1. */
122 FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
123 FIXED_VALUE_TYPE fconst1[MAX_FCONST1];
125 /* We make one copy of (const_int C) where C is in
126 [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
127 to save space during the compilation and simplify comparisons of
128 integers. */
130 rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
132 /* Standard pieces of rtx, to be substituted directly into things. */
133 rtx pc_rtx;
134 rtx ret_rtx;
135 rtx simple_return_rtx;
136 rtx cc0_rtx;
138 /* A hash table storing CONST_INTs whose absolute value is greater
139 than MAX_SAVED_CONST_INT. */
141 struct const_int_hasher : ggc_cache_hasher<rtx>
143 typedef HOST_WIDE_INT compare_type;
145 static hashval_t hash (rtx i);
146 static bool equal (rtx i, HOST_WIDE_INT h);
149 static GTY ((cache)) hash_table<const_int_hasher> *const_int_htab;
151 struct const_wide_int_hasher : ggc_cache_hasher<rtx>
153 static hashval_t hash (rtx x);
154 static bool equal (rtx x, rtx y);
157 static GTY ((cache)) hash_table<const_wide_int_hasher> *const_wide_int_htab;
159 /* A hash table storing register attribute structures. */
160 struct reg_attr_hasher : ggc_cache_hasher<reg_attrs *>
162 static hashval_t hash (reg_attrs *x);
163 static bool equal (reg_attrs *a, reg_attrs *b);
166 static GTY ((cache)) hash_table<reg_attr_hasher> *reg_attrs_htab;
168 /* A hash table storing all CONST_DOUBLEs. */
169 struct const_double_hasher : ggc_cache_hasher<rtx>
171 static hashval_t hash (rtx x);
172 static bool equal (rtx x, rtx y);
175 static GTY ((cache)) hash_table<const_double_hasher> *const_double_htab;
177 /* A hash table storing all CONST_FIXEDs. */
178 struct const_fixed_hasher : ggc_cache_hasher<rtx>
180 static hashval_t hash (rtx x);
181 static bool equal (rtx x, rtx y);
184 static GTY ((cache)) hash_table<const_fixed_hasher> *const_fixed_htab;
186 #define cur_insn_uid (crtl->emit.x_cur_insn_uid)
187 #define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
188 #define first_label_num (crtl->emit.x_first_label_num)
190 static void set_used_decls (tree);
191 static void mark_label_nuses (rtx);
192 #if TARGET_SUPPORTS_WIDE_INT
193 static rtx lookup_const_wide_int (rtx);
194 #endif
195 static rtx lookup_const_double (rtx);
196 static rtx lookup_const_fixed (rtx);
197 static reg_attrs *get_reg_attrs (tree, int);
198 static rtx gen_const_vector (machine_mode, int);
199 static void copy_rtx_if_shared_1 (rtx *orig);
 201 /* Probability of the conditional branch currently being processed by try_split.
202 Set to -1 otherwise. */
203 int split_branch_probability = -1;
 205 /* Returns a hash code for X (which is really a CONST_INT). */
207 hashval_t
208 const_int_hasher::hash (rtx x)
210 return (hashval_t) INTVAL (x);
213 /* Returns nonzero if the value represented by X (which is really a
214 CONST_INT) is the same as that given by Y (which is really a
215 HOST_WIDE_INT *). */
217 bool
218 const_int_hasher::equal (rtx x, HOST_WIDE_INT y)
220 return (INTVAL (x) == y);
223 #if TARGET_SUPPORTS_WIDE_INT
 224 /* Returns a hash code for X (which is really a CONST_WIDE_INT). */
226 hashval_t
227 const_wide_int_hasher::hash (rtx x)
229 int i;
230 unsigned HOST_WIDE_INT hash = 0;
231 const_rtx xr = x;
233 for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
234 hash += CONST_WIDE_INT_ELT (xr, i);
236 return (hashval_t) hash;
239 /* Returns nonzero if the value represented by X (which is really a
240 CONST_WIDE_INT) is the same as that given by Y (which is really a
241 CONST_WIDE_INT). */
243 bool
244 const_wide_int_hasher::equal (rtx x, rtx y)
246 int i;
247 const_rtx xr = x;
248 const_rtx yr = y;
249 if (CONST_WIDE_INT_NUNITS (xr) != CONST_WIDE_INT_NUNITS (yr))
250 return false;
252 for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
253 if (CONST_WIDE_INT_ELT (xr, i) != CONST_WIDE_INT_ELT (yr, i))
254 return false;
256 return true;
258 #endif
260 /* Returns a hash code for X (which is really a CONST_DOUBLE). */
261 hashval_t
262 const_double_hasher::hash (rtx x)
264 const_rtx const value = x;
265 hashval_t h;
267 if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (value) == VOIDmode)
268 h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
269 else
271 h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
272 /* MODE is used in the comparison, so it should be in the hash. */
273 h ^= GET_MODE (value);
275 return h;
 278 /* Returns nonzero if the value represented by X (really a CONST_DOUBLE)
 279 is the same as that represented by Y (really a CONST_DOUBLE). */
280 bool
281 const_double_hasher::equal (rtx x, rtx y)
283 const_rtx const a = x, b = y;
285 if (GET_MODE (a) != GET_MODE (b))
286 return 0;
287 if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (a) == VOIDmode)
288 return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
289 && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
290 else
291 return real_identical (CONST_DOUBLE_REAL_VALUE (a),
292 CONST_DOUBLE_REAL_VALUE (b));
295 /* Returns a hash code for X (which is really a CONST_FIXED). */
297 hashval_t
298 const_fixed_hasher::hash (rtx x)
300 const_rtx const value = x;
301 hashval_t h;
303 h = fixed_hash (CONST_FIXED_VALUE (value));
304 /* MODE is used in the comparison, so it should be in the hash. */
305 h ^= GET_MODE (value);
306 return h;
309 /* Returns nonzero if the value represented by X is the same as that
310 represented by Y. */
312 bool
313 const_fixed_hasher::equal (rtx x, rtx y)
315 const_rtx const a = x, b = y;
317 if (GET_MODE (a) != GET_MODE (b))
318 return 0;
319 return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
322 /* Return true if the given memory attributes are equal. */
324 bool
325 mem_attrs_eq_p (const struct mem_attrs *p, const struct mem_attrs *q)
327 if (p == q)
328 return true;
329 if (!p || !q)
330 return false;
331 return (p->alias == q->alias
332 && p->offset_known_p == q->offset_known_p
333 && (!p->offset_known_p || p->offset == q->offset)
334 && p->size_known_p == q->size_known_p
335 && (!p->size_known_p || p->size == q->size)
336 && p->align == q->align
337 && p->addrspace == q->addrspace
338 && (p->expr == q->expr
339 || (p->expr != NULL_TREE && q->expr != NULL_TREE
340 && operand_equal_p (p->expr, q->expr, 0))));
343 /* Set MEM's memory attributes so that they are the same as ATTRS. */
345 static void
346 set_mem_attrs (rtx mem, mem_attrs *attrs)
348 /* If everything is the default, we can just clear the attributes. */
349 if (mem_attrs_eq_p (attrs, mode_mem_attrs[(int) GET_MODE (mem)]))
351 MEM_ATTRS (mem) = 0;
352 return;
355 if (!MEM_ATTRS (mem)
356 || !mem_attrs_eq_p (attrs, MEM_ATTRS (mem)))
358 MEM_ATTRS (mem) = ggc_alloc<mem_attrs> ();
359 memcpy (MEM_ATTRS (mem), attrs, sizeof (mem_attrs));
 363 /* Returns a hash code for X (which is really a reg_attrs *). */
365 hashval_t
366 reg_attr_hasher::hash (reg_attrs *x)
368 const reg_attrs *const p = x;
370 return ((p->offset * 1000) ^ (intptr_t) p->decl);
373 /* Returns nonzero if the value represented by X is the same as that given by
374 Y. */
376 bool
377 reg_attr_hasher::equal (reg_attrs *x, reg_attrs *y)
379 const reg_attrs *const p = x;
380 const reg_attrs *const q = y;
382 return (p->decl == q->decl && p->offset == q->offset);
384 /* Allocate a new reg_attrs structure and insert it into the hash table if
 385 one identical to it is not already in the table. We are doing this for a
 386 register known to hold DECL at offset OFFSET. */
388 static reg_attrs *
389 get_reg_attrs (tree decl, int offset)
391 reg_attrs attrs;
393 /* If everything is the default, we can just return zero. */
394 if (decl == 0 && offset == 0)
395 return 0;
397 attrs.decl = decl;
398 attrs.offset = offset;
400 reg_attrs **slot = reg_attrs_htab->find_slot (&attrs, INSERT);
401 if (*slot == 0)
403 *slot = ggc_alloc<reg_attrs> ();
404 memcpy (*slot, &attrs, sizeof (reg_attrs));
407 return *slot;
411 #if !HAVE_blockage
412 /* Generate an empty ASM_INPUT, which is used to block attempts to schedule,
 413 and to keep register equivalences from being seen across this insn. */
416 gen_blockage (void)
418 rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
419 MEM_VOLATILE_P (x) = true;
420 return x;
422 #endif
425 /* Generate a new REG rtx. Make sure ORIGINAL_REGNO is set properly, and
426 don't attempt to share with the various global pieces of rtl (such as
427 frame_pointer_rtx). */
430 gen_raw_REG (machine_mode mode, int regno)
432 rtx x = gen_rtx_raw_REG (mode, regno);
433 ORIGINAL_REGNO (x) = regno;
434 return x;
437 /* There are some RTL codes that require special attention; the generation
438 functions do the raw handling. If you add to this list, modify
439 special_rtx in gengenrtl.c as well. */
441 rtx_expr_list *
442 gen_rtx_EXPR_LIST (machine_mode mode, rtx expr, rtx expr_list)
444 return as_a <rtx_expr_list *> (gen_rtx_fmt_ee (EXPR_LIST, mode, expr,
445 expr_list));
448 rtx_insn_list *
449 gen_rtx_INSN_LIST (machine_mode mode, rtx insn, rtx insn_list)
451 return as_a <rtx_insn_list *> (gen_rtx_fmt_ue (INSN_LIST, mode, insn,
452 insn_list));
455 rtx_insn *
456 gen_rtx_INSN (machine_mode mode, rtx_insn *prev_insn, rtx_insn *next_insn,
457 basic_block bb, rtx pattern, int location, int code,
458 rtx reg_notes)
460 return as_a <rtx_insn *> (gen_rtx_fmt_uuBeiie (INSN, mode,
461 prev_insn, next_insn,
462 bb, pattern, location, code,
463 reg_notes));
467 gen_rtx_CONST_INT (machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
469 if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
470 return const_int_rtx[arg + MAX_SAVED_CONST_INT];
472 #if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
473 if (const_true_rtx && arg == STORE_FLAG_VALUE)
474 return const_true_rtx;
475 #endif
477 /* Look up the CONST_INT in the hash table. */
478 rtx *slot = const_int_htab->find_slot_with_hash (arg, (hashval_t) arg,
479 INSERT);
480 if (*slot == 0)
481 *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);
483 return *slot;
487 gen_int_mode (HOST_WIDE_INT c, machine_mode mode)
489 return GEN_INT (trunc_int_for_mode (c, mode));
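/* Informal note: because CONST_INTs are interned (see gen_rtx_CONST_INT
   above), pointer equality suffices to compare them; for example
   GEN_INT (0) == const0_rtx and gen_int_mode (-1, SImode) == constm1_rtx.  */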
492 /* CONST_DOUBLEs might be created from pairs of integers, or from
493 REAL_VALUE_TYPEs. Also, their length is known only at run time,
494 so we cannot use gen_rtx_raw_CONST_DOUBLE. */
496 /* Determine whether REAL, a CONST_DOUBLE, already exists in the
497 hash table. If so, return its counterpart; otherwise add it
498 to the hash table and return it. */
499 static rtx
500 lookup_const_double (rtx real)
502 rtx *slot = const_double_htab->find_slot (real, INSERT);
503 if (*slot == 0)
504 *slot = real;
506 return *slot;
509 /* Return a CONST_DOUBLE rtx for a floating-point value specified by
510 VALUE in mode MODE. */
512 const_double_from_real_value (REAL_VALUE_TYPE value, machine_mode mode)
514 rtx real = rtx_alloc (CONST_DOUBLE);
515 PUT_MODE (real, mode);
517 real->u.rv = value;
519 return lookup_const_double (real);
522 /* Determine whether FIXED, a CONST_FIXED, already exists in the
523 hash table. If so, return its counterpart; otherwise add it
524 to the hash table and return it. */
526 static rtx
527 lookup_const_fixed (rtx fixed)
529 rtx *slot = const_fixed_htab->find_slot (fixed, INSERT);
530 if (*slot == 0)
531 *slot = fixed;
533 return *slot;
536 /* Return a CONST_FIXED rtx for a fixed-point value specified by
537 VALUE in mode MODE. */
540 const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, machine_mode mode)
542 rtx fixed = rtx_alloc (CONST_FIXED);
543 PUT_MODE (fixed, mode);
545 fixed->u.fv = value;
547 return lookup_const_fixed (fixed);
550 #if TARGET_SUPPORTS_WIDE_INT == 0
551 /* Constructs double_int from rtx CST. */
553 double_int
554 rtx_to_double_int (const_rtx cst)
556 double_int r;
558 if (CONST_INT_P (cst))
559 r = double_int::from_shwi (INTVAL (cst));
560 else if (CONST_DOUBLE_AS_INT_P (cst))
562 r.low = CONST_DOUBLE_LOW (cst);
563 r.high = CONST_DOUBLE_HIGH (cst);
565 else
566 gcc_unreachable ();
568 return r;
570 #endif
572 #if TARGET_SUPPORTS_WIDE_INT
573 /* Determine whether CONST_WIDE_INT WINT already exists in the hash table.
574 If so, return its counterpart; otherwise add it to the hash table and
575 return it. */
577 static rtx
578 lookup_const_wide_int (rtx wint)
580 rtx *slot = const_wide_int_htab->find_slot (wint, INSERT);
581 if (*slot == 0)
582 *slot = wint;
584 return *slot;
586 #endif
588 /* Return an rtx constant for V, given that the constant has mode MODE.
589 The returned rtx will be a CONST_INT if V fits, otherwise it will be
590 a CONST_DOUBLE (if !TARGET_SUPPORTS_WIDE_INT) or a CONST_WIDE_INT
591 (if TARGET_SUPPORTS_WIDE_INT). */
594 immed_wide_int_const (const wide_int_ref &v, machine_mode mode)
596 unsigned int len = v.get_len ();
597 unsigned int prec = GET_MODE_PRECISION (mode);
599 /* Allow truncation but not extension since we do not know if the
600 number is signed or unsigned. */
601 gcc_assert (prec <= v.get_precision ());
603 if (len < 2 || prec <= HOST_BITS_PER_WIDE_INT)
604 return gen_int_mode (v.elt (0), mode);
606 #if TARGET_SUPPORTS_WIDE_INT
608 unsigned int i;
609 rtx value;
610 unsigned int blocks_needed
611 = (prec + HOST_BITS_PER_WIDE_INT - 1) / HOST_BITS_PER_WIDE_INT;
613 if (len > blocks_needed)
614 len = blocks_needed;
616 value = const_wide_int_alloc (len);
618 /* It is so tempting to just put the mode in here. Must control
619 myself ... */
620 PUT_MODE (value, VOIDmode);
621 CWI_PUT_NUM_ELEM (value, len);
623 for (i = 0; i < len; i++)
624 CONST_WIDE_INT_ELT (value, i) = v.elt (i);
626 return lookup_const_wide_int (value);
628 #else
629 return immed_double_const (v.elt (0), v.elt (1), mode);
630 #endif
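/* Informal usage sketch (assumes the wi::shwi helper declared in wide-int.h):

       rtx x = immed_wide_int_const (wi::shwi (42, GET_MODE_PRECISION (DImode)),
                                     DImode);

   Here the value fits in a HOST_WIDE_INT, so X is simply the shared CONST_INT
   for 42; wider values produce a CONST_WIDE_INT or CONST_DOUBLE as described
   above.  */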
633 #if TARGET_SUPPORTS_WIDE_INT == 0
634 /* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
635 of ints: I0 is the low-order word and I1 is the high-order word.
636 For values that are larger than HOST_BITS_PER_DOUBLE_INT, the
637 implied upper bits are copies of the high bit of i1. The value
638 itself is neither signed nor unsigned. Do not use this routine for
639 non-integer modes; convert to REAL_VALUE_TYPE and use
640 CONST_DOUBLE_FROM_REAL_VALUE. */
643 immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, machine_mode mode)
645 rtx value;
646 unsigned int i;
648 /* There are the following cases (note that there are no modes with
649 HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < HOST_BITS_PER_DOUBLE_INT):
651 1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
652 gen_int_mode.
653 2) If the value of the integer fits into HOST_WIDE_INT anyway
654 (i.e., i1 consists only from copies of the sign bit, and sign
655 of i0 and i1 are the same), then we return a CONST_INT for i0.
656 3) Otherwise, we create a CONST_DOUBLE for i0 and i1. */
657 if (mode != VOIDmode)
659 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
660 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
661 /* We can get a 0 for an error mark. */
662 || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
663 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
664 || GET_MODE_CLASS (mode) == MODE_POINTER_BOUNDS);
666 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
667 return gen_int_mode (i0, mode);
670 /* If this integer fits in one word, return a CONST_INT. */
671 if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
672 return GEN_INT (i0);
674 /* We use VOIDmode for integers. */
675 value = rtx_alloc (CONST_DOUBLE);
676 PUT_MODE (value, VOIDmode);
678 CONST_DOUBLE_LOW (value) = i0;
679 CONST_DOUBLE_HIGH (value) = i1;
681 for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
682 XWINT (value, i) = 0;
684 return lookup_const_double (value);
686 #endif
689 gen_rtx_REG (machine_mode mode, unsigned int regno)
691 /* In case the MD file explicitly references the frame pointer, have
692 all such references point to the same frame pointer. This is
693 used during frame pointer elimination to distinguish the explicit
694 references to these registers from pseudos that happened to be
695 assigned to them.
697 If we have eliminated the frame pointer or arg pointer, we will
698 be using it as a normal register, for example as a spill
699 register. In such cases, we might be accessing it in a mode that
700 is not Pmode and therefore cannot use the pre-allocated rtx.
702 Also don't do this when we are making new REGs in reload, since
703 we don't want to get confused with the real pointers. */
705 if (mode == Pmode && !reload_in_progress && !lra_in_progress)
707 if (regno == FRAME_POINTER_REGNUM
708 && (!reload_completed || frame_pointer_needed))
709 return frame_pointer_rtx;
710 #if !HARD_FRAME_POINTER_IS_FRAME_POINTER
711 if (regno == HARD_FRAME_POINTER_REGNUM
712 && (!reload_completed || frame_pointer_needed))
713 return hard_frame_pointer_rtx;
714 #endif
715 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && !HARD_FRAME_POINTER_IS_ARG_POINTER
716 if (regno == ARG_POINTER_REGNUM)
717 return arg_pointer_rtx;
718 #endif
719 #ifdef RETURN_ADDRESS_POINTER_REGNUM
720 if (regno == RETURN_ADDRESS_POINTER_REGNUM)
721 return return_address_pointer_rtx;
722 #endif
723 if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
724 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
725 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
726 return pic_offset_table_rtx;
727 if (regno == STACK_POINTER_REGNUM)
728 return stack_pointer_rtx;
731 #if 0
732 /* If the per-function register table has been set up, try to re-use
733 an existing entry in that table to avoid useless generation of RTL.
735 This code is disabled for now until we can fix the various backends
736 which depend on having non-shared hard registers in some cases. Long
737 term we want to re-enable this code as it can significantly cut down
738 on the amount of useless RTL that gets generated.
740 We'll also need to fix some code that runs after reload that wants to
741 set ORIGINAL_REGNO. */
743 if (cfun
744 && cfun->emit
745 && regno_reg_rtx
746 && regno < FIRST_PSEUDO_REGISTER
747 && reg_raw_mode[regno] == mode)
748 return regno_reg_rtx[regno];
749 #endif
751 return gen_raw_REG (mode, regno);
755 gen_rtx_MEM (machine_mode mode, rtx addr)
757 rtx rt = gen_rtx_raw_MEM (mode, addr);
759 /* This field is not cleared by the mere allocation of the rtx, so
760 we clear it here. */
761 MEM_ATTRS (rt) = 0;
763 return rt;
 766 /* Generate a MEM referring to non-trapping constant memory. */
769 gen_const_mem (machine_mode mode, rtx addr)
771 rtx mem = gen_rtx_MEM (mode, addr);
772 MEM_READONLY_P (mem) = 1;
773 MEM_NOTRAP_P (mem) = 1;
774 return mem;
777 /* Generate a MEM referring to fixed portions of the frame, e.g., register
778 save areas. */
781 gen_frame_mem (machine_mode mode, rtx addr)
783 rtx mem = gen_rtx_MEM (mode, addr);
784 MEM_NOTRAP_P (mem) = 1;
785 set_mem_alias_set (mem, get_frame_alias_set ());
786 return mem;
789 /* Generate a MEM referring to a temporary use of the stack, not part
790 of the fixed stack frame. For example, something which is pushed
791 by a target splitter. */
793 gen_tmp_stack_mem (machine_mode mode, rtx addr)
795 rtx mem = gen_rtx_MEM (mode, addr);
796 MEM_NOTRAP_P (mem) = 1;
797 if (!cfun->calls_alloca)
798 set_mem_alias_set (mem, get_frame_alias_set ());
799 return mem;
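/* Informal example (the label name is only a placeholder): a constant-pool
   reference might be wrapped as

       rtx mem = gen_const_mem (SImode, gen_rtx_SYMBOL_REF (Pmode, ".LC0"));

   The three helpers above differ only in which attributes (read-only,
   no-trap, frame alias set) they pre-set on the new MEM.  */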
802 /* We want to create (subreg:OMODE (obj:IMODE) OFFSET). Return true if
803 this construct would be valid, and false otherwise. */
805 bool
806 validate_subreg (machine_mode omode, machine_mode imode,
807 const_rtx reg, unsigned int offset)
809 unsigned int isize = GET_MODE_SIZE (imode);
810 unsigned int osize = GET_MODE_SIZE (omode);
812 /* All subregs must be aligned. */
813 if (offset % osize != 0)
814 return false;
816 /* The subreg offset cannot be outside the inner object. */
817 if (offset >= isize)
818 return false;
820 /* ??? This should not be here. Temporarily continue to allow word_mode
821 subregs of anything. The most common offender is (subreg:SI (reg:DF)).
822 Generally, backends are doing something sketchy but it'll take time to
823 fix them all. */
824 if (omode == word_mode)
826 /* ??? Similarly, e.g. with (subreg:DF (reg:TI)). Though store_bit_field
827 is the culprit here, and not the backends. */
828 else if (osize >= UNITS_PER_WORD && isize >= osize)
830 /* Allow component subregs of complex and vector. Though given the below
831 extraction rules, it's not always clear what that means. */
832 else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
833 && GET_MODE_INNER (imode) == omode)
835 /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
836 i.e. (subreg:V4SF (reg:SF) 0). This surely isn't the cleanest way to
837 represent this. It's questionable if this ought to be represented at
838 all -- why can't this all be hidden in post-reload splitters that make
 839 arbitrary mode changes to the registers themselves. */
840 else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
842 /* Subregs involving floating point modes are not allowed to
843 change size. Therefore (subreg:DI (reg:DF) 0) is fine, but
844 (subreg:SI (reg:DF) 0) isn't. */
845 else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
847 if (! (isize == osize
848 /* LRA can use subreg to store a floating point value in
849 an integer mode. Although the floating point and the
850 integer modes need the same number of hard registers,
 851 the size of the floating point mode can be less than that of the
 852 integer mode. LRA also uses subregs for a register that
 853 should be used in different modes in one insn. */
854 || lra_in_progress))
855 return false;
858 /* Paradoxical subregs must have offset zero. */
859 if (osize > isize)
860 return offset == 0;
862 /* This is a normal subreg. Verify that the offset is representable. */
864 /* For hard registers, we already have most of these rules collected in
865 subreg_offset_representable_p. */
866 if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
868 unsigned int regno = REGNO (reg);
870 #ifdef CANNOT_CHANGE_MODE_CLASS
871 if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
872 && GET_MODE_INNER (imode) == omode)
874 else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
875 return false;
876 #endif
878 return subreg_offset_representable_p (regno, imode, offset, omode);
881 /* For pseudo registers, we want most of the same checks. Namely:
 882 If the register is no larger than a word, the subreg must be lowpart.
883 If the register is larger than a word, the subreg must be the lowpart
884 of a subword. A subreg does *not* perform arbitrary bit extraction.
885 Given that we've already checked mode/offset alignment, we only have
886 to check subword subregs here. */
887 if (osize < UNITS_PER_WORD
888 && ! (lra_in_progress && (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))))
890 machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
891 unsigned int low_off = subreg_lowpart_offset (omode, wmode);
892 if (offset % UNITS_PER_WORD != low_off)
893 return false;
895 return true;
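/* Worked examples of the rules above (informal): with 4-byte words,
   (subreg:SI (reg:DI) 0) and (subreg:SI (reg:DI) 4) are valid,
   (subreg:SI (reg:DI) 2) fails the alignment check,
   (subreg:HI (reg:DF) 0) is rejected by the float-mode rule, and
   (subreg:SI (reg:DF) 0) is let through only by the word_mode escape.  */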
899 gen_rtx_SUBREG (machine_mode mode, rtx reg, int offset)
901 gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
902 return gen_rtx_raw_SUBREG (mode, reg, offset);
905 /* Generate a SUBREG representing the least-significant part of REG if MODE
 906 is smaller than the mode of REG, otherwise a paradoxical SUBREG. */
909 gen_lowpart_SUBREG (machine_mode mode, rtx reg)
911 machine_mode inmode;
913 inmode = GET_MODE (reg);
914 if (inmode == VOIDmode)
915 inmode = mode;
916 return gen_rtx_SUBREG (mode, reg,
917 subreg_lowpart_offset (mode, inmode));
921 gen_rtx_VAR_LOCATION (machine_mode mode, tree decl, rtx loc,
922 enum var_init_status status)
924 rtx x = gen_rtx_fmt_te (VAR_LOCATION, mode, decl, loc);
925 PAT_VAR_LOCATION_STATUS (x) = status;
926 return x;
 930 /* Create an rtvec and store within it the RTXen passed in the arguments. */
932 rtvec
933 gen_rtvec (int n, ...)
935 int i;
936 rtvec rt_val;
937 va_list p;
939 va_start (p, n);
941 /* Don't allocate an empty rtvec... */
942 if (n == 0)
944 va_end (p);
945 return NULL_RTVEC;
948 rt_val = rtvec_alloc (n);
950 for (i = 0; i < n; i++)
951 rt_val->elem[i] = va_arg (p, rtx);
953 va_end (p);
954 return rt_val;
957 rtvec
958 gen_rtvec_v (int n, rtx *argp)
960 int i;
961 rtvec rt_val;
963 /* Don't allocate an empty rtvec... */
964 if (n == 0)
965 return NULL_RTVEC;
967 rt_val = rtvec_alloc (n);
969 for (i = 0; i < n; i++)
970 rt_val->elem[i] = *argp++;
972 return rt_val;
975 rtvec
976 gen_rtvec_v (int n, rtx_insn **argp)
978 int i;
979 rtvec rt_val;
981 /* Don't allocate an empty rtvec... */
982 if (n == 0)
983 return NULL_RTVEC;
985 rt_val = rtvec_alloc (n);
987 for (i = 0; i < n; i++)
988 rt_val->elem[i] = *argp++;
990 return rt_val;
994 /* Return the number of bytes between the start of an OUTER_MODE
995 in-memory value and the start of an INNER_MODE in-memory value,
996 given that the former is a lowpart of the latter. It may be a
997 paradoxical lowpart, in which case the offset will be negative
998 on big-endian targets. */
1001 byte_lowpart_offset (machine_mode outer_mode,
1002 machine_mode inner_mode)
1004 if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode))
1005 return subreg_lowpart_offset (outer_mode, inner_mode);
1006 else
1007 return -subreg_lowpart_offset (inner_mode, outer_mode);
1010 /* Generate a REG rtx for a new pseudo register of mode MODE.
1011 This pseudo is assigned the next sequential register number. */
1014 gen_reg_rtx (machine_mode mode)
1016 rtx val;
1017 unsigned int align = GET_MODE_ALIGNMENT (mode);
1019 gcc_assert (can_create_pseudo_p ());
1021 /* If a virtual register with bigger mode alignment is generated,
1022 increase stack alignment estimation because it might be spilled
1023 to stack later. */
1024 if (SUPPORTS_STACK_ALIGNMENT
1025 && crtl->stack_alignment_estimated < align
1026 && !crtl->stack_realign_processed)
1028 unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
1029 if (crtl->stack_alignment_estimated < min_align)
1030 crtl->stack_alignment_estimated = min_align;
1033 if (generating_concat_p
1034 && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
1035 || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
1037 /* For complex modes, don't make a single pseudo.
1038 Instead, make a CONCAT of two pseudos.
1039 This allows noncontiguous allocation of the real and imaginary parts,
1040 which makes much better code. Besides, allocating DCmode
1041 pseudos overstrains reload on some machines like the 386. */
1042 rtx realpart, imagpart;
1043 machine_mode partmode = GET_MODE_INNER (mode);
1045 realpart = gen_reg_rtx (partmode);
1046 imagpart = gen_reg_rtx (partmode);
1047 return gen_rtx_CONCAT (mode, realpart, imagpart);
1050 /* Do not call gen_reg_rtx with uninitialized crtl. */
1051 gcc_assert (crtl->emit.regno_pointer_align_length);
1053 /* Make sure regno_pointer_align, and regno_reg_rtx are large
1054 enough to have an element for this pseudo reg number. */
1056 if (reg_rtx_no == crtl->emit.regno_pointer_align_length)
1058 int old_size = crtl->emit.regno_pointer_align_length;
1059 char *tmp;
1060 rtx *new1;
1062 tmp = XRESIZEVEC (char, crtl->emit.regno_pointer_align, old_size * 2);
1063 memset (tmp + old_size, 0, old_size);
1064 crtl->emit.regno_pointer_align = (unsigned char *) tmp;
1066 new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, old_size * 2);
1067 memset (new1 + old_size, 0, old_size * sizeof (rtx));
1068 regno_reg_rtx = new1;
1070 crtl->emit.regno_pointer_align_length = old_size * 2;
1073 val = gen_raw_REG (mode, reg_rtx_no);
1074 regno_reg_rtx[reg_rtx_no++] = val;
1075 return val;
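/* Example of the complex-mode case above (informal): while
   generating_concat_p is set, gen_reg_rtx (DCmode) does not return a single
   DCmode pseudo but (concat:DC (reg:DF ...) (reg:DF ...)) built from two
   fresh pseudos.  */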
1078 /* Return TRUE if REG is a PARM_DECL, FALSE otherwise. */
1080 bool
1081 reg_is_parm_p (rtx reg)
1083 tree decl;
1085 gcc_assert (REG_P (reg));
1086 decl = REG_EXPR (reg);
1087 return (decl && TREE_CODE (decl) == PARM_DECL);
1090 /* Update NEW with the same attributes as REG, but with OFFSET added
1091 to the REG_OFFSET. */
1093 static void
1094 update_reg_offset (rtx new_rtx, rtx reg, int offset)
1096 REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
1097 REG_OFFSET (reg) + offset);
1100 /* Generate a register with same attributes as REG, but with OFFSET
1101 added to the REG_OFFSET. */
1104 gen_rtx_REG_offset (rtx reg, machine_mode mode, unsigned int regno,
1105 int offset)
1107 rtx new_rtx = gen_rtx_REG (mode, regno);
1109 update_reg_offset (new_rtx, reg, offset);
1110 return new_rtx;
1113 /* Generate a new pseudo-register with the same attributes as REG, but
1114 with OFFSET added to the REG_OFFSET. */
1117 gen_reg_rtx_offset (rtx reg, machine_mode mode, int offset)
1119 rtx new_rtx = gen_reg_rtx (mode);
1121 update_reg_offset (new_rtx, reg, offset);
1122 return new_rtx;
1125 /* Adjust REG in-place so that it has mode MODE. It is assumed that the
1126 new register is a (possibly paradoxical) lowpart of the old one. */
1128 void
1129 adjust_reg_mode (rtx reg, machine_mode mode)
1131 update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
1132 PUT_MODE (reg, mode);
1135 /* Copy REG's attributes from X, if X has any attributes. If REG and X
1136 have different modes, REG is a (possibly paradoxical) lowpart of X. */
1138 void
1139 set_reg_attrs_from_value (rtx reg, rtx x)
1141 int offset;
1142 bool can_be_reg_pointer = true;
1144 /* Don't call mark_reg_pointer for incompatible pointer sign
1145 extension. */
1146 while (GET_CODE (x) == SIGN_EXTEND
1147 || GET_CODE (x) == ZERO_EXTEND
1148 || GET_CODE (x) == TRUNCATE
1149 || (GET_CODE (x) == SUBREG && subreg_lowpart_p (x)))
1151 #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
1152 if ((GET_CODE (x) == SIGN_EXTEND && POINTERS_EXTEND_UNSIGNED)
1153 || (GET_CODE (x) != SIGN_EXTEND && ! POINTERS_EXTEND_UNSIGNED))
1154 can_be_reg_pointer = false;
1155 #endif
1156 x = XEXP (x, 0);
1159 /* Hard registers can be reused for multiple purposes within the same
1160 function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
1161 on them is wrong. */
1162 if (HARD_REGISTER_P (reg))
1163 return;
1165 offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
1166 if (MEM_P (x))
1168 if (MEM_OFFSET_KNOWN_P (x))
1169 REG_ATTRS (reg) = get_reg_attrs (MEM_EXPR (x),
1170 MEM_OFFSET (x) + offset);
1171 if (can_be_reg_pointer && MEM_POINTER (x))
1172 mark_reg_pointer (reg, 0);
1174 else if (REG_P (x))
1176 if (REG_ATTRS (x))
1177 update_reg_offset (reg, x, offset);
1178 if (can_be_reg_pointer && REG_POINTER (x))
1179 mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
1183 /* Generate a REG rtx for a new pseudo register, copying the mode
1184 and attributes from X. */
1187 gen_reg_rtx_and_attrs (rtx x)
1189 rtx reg = gen_reg_rtx (GET_MODE (x));
1190 set_reg_attrs_from_value (reg, x);
1191 return reg;
1194 /* Set the register attributes for registers contained in PARM_RTX.
1195 Use needed values from memory attributes of MEM. */
1197 void
1198 set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
1200 if (REG_P (parm_rtx))
1201 set_reg_attrs_from_value (parm_rtx, mem);
1202 else if (GET_CODE (parm_rtx) == PARALLEL)
1204 /* Check for a NULL entry in the first slot, used to indicate that the
1205 parameter goes both on the stack and in registers. */
1206 int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
1207 for (; i < XVECLEN (parm_rtx, 0); i++)
1209 rtx x = XVECEXP (parm_rtx, 0, i);
1210 if (REG_P (XEXP (x, 0)))
1211 REG_ATTRS (XEXP (x, 0))
1212 = get_reg_attrs (MEM_EXPR (mem),
1213 INTVAL (XEXP (x, 1)));
1218 /* Set the REG_ATTRS for registers in value X, given that X represents
1219 decl T. */
1221 void
1222 set_reg_attrs_for_decl_rtl (tree t, rtx x)
1224 if (GET_CODE (x) == SUBREG)
1226 gcc_assert (subreg_lowpart_p (x));
1227 x = SUBREG_REG (x);
1229 if (REG_P (x))
1230 REG_ATTRS (x)
1231 = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
1232 DECL_MODE (t)));
1233 if (GET_CODE (x) == CONCAT)
1235 if (REG_P (XEXP (x, 0)))
1236 REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
1237 if (REG_P (XEXP (x, 1)))
1238 REG_ATTRS (XEXP (x, 1))
1239 = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
1241 if (GET_CODE (x) == PARALLEL)
1243 int i, start;
1245 /* Check for a NULL entry, used to indicate that the parameter goes
1246 both on the stack and in registers. */
1247 if (XEXP (XVECEXP (x, 0, 0), 0))
1248 start = 0;
1249 else
1250 start = 1;
1252 for (i = start; i < XVECLEN (x, 0); i++)
1254 rtx y = XVECEXP (x, 0, i);
1255 if (REG_P (XEXP (y, 0)))
1256 REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
1261 /* Assign the RTX X to declaration T. */
1263 void
1264 set_decl_rtl (tree t, rtx x)
1266 DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
1267 if (x)
1268 set_reg_attrs_for_decl_rtl (t, x);
1271 /* Assign the RTX X to parameter declaration T. BY_REFERENCE_P is true
1272 if the ABI requires the parameter to be passed by reference. */
1274 void
1275 set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
1277 DECL_INCOMING_RTL (t) = x;
1278 if (x && !by_reference_p)
1279 set_reg_attrs_for_decl_rtl (t, x);
1282 /* Identify REG (which may be a CONCAT) as a user register. */
1284 void
1285 mark_user_reg (rtx reg)
1287 if (GET_CODE (reg) == CONCAT)
1289 REG_USERVAR_P (XEXP (reg, 0)) = 1;
1290 REG_USERVAR_P (XEXP (reg, 1)) = 1;
1292 else
1294 gcc_assert (REG_P (reg));
1295 REG_USERVAR_P (reg) = 1;
1299 /* Identify REG as a probable pointer register and show its alignment
1300 as ALIGN, if nonzero. */
1302 void
1303 mark_reg_pointer (rtx reg, int align)
1305 if (! REG_POINTER (reg))
1307 REG_POINTER (reg) = 1;
1309 if (align)
1310 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1312 else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
 1313 /* We can no longer be sure just how aligned this pointer is. */
1314 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1317 /* Return 1 plus largest pseudo reg number used in the current function. */
1320 max_reg_num (void)
1322 return reg_rtx_no;
1325 /* Return 1 + the largest label number used so far in the current function. */
1328 max_label_num (void)
1330 return label_num;
1333 /* Return first label number used in this function (if any were used). */
1336 get_first_label_num (void)
1338 return first_label_num;
1341 /* If the rtx for label was created during the expansion of a nested
1342 function, then first_label_num won't include this label number.
1343 Fix this now so that array indices work later. */
1345 void
1346 maybe_set_first_label_num (rtx x)
1348 if (CODE_LABEL_NUMBER (x) < first_label_num)
1349 first_label_num = CODE_LABEL_NUMBER (x);
1352 /* Return a value representing some low-order bits of X, where the number
1353 of low-order bits is given by MODE. Note that no conversion is done
1354 between floating-point and fixed-point values, rather, the bit
1355 representation is returned.
1357 This function handles the cases in common between gen_lowpart, below,
1358 and two variants in cse.c and combine.c. These are the cases that can
1359 be safely handled at all points in the compilation.
1361 If this is not a case we can handle, return 0. */
1364 gen_lowpart_common (machine_mode mode, rtx x)
1366 int msize = GET_MODE_SIZE (mode);
1367 int xsize;
1368 int offset = 0;
1369 machine_mode innermode;
1371 /* Unfortunately, this routine doesn't take a parameter for the mode of X,
1372 so we have to make one up. Yuk. */
1373 innermode = GET_MODE (x);
1374 if (CONST_INT_P (x)
1375 && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
1376 innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
1377 else if (innermode == VOIDmode)
1378 innermode = mode_for_size (HOST_BITS_PER_DOUBLE_INT, MODE_INT, 0);
1380 xsize = GET_MODE_SIZE (innermode);
1382 gcc_assert (innermode != VOIDmode && innermode != BLKmode);
1384 if (innermode == mode)
1385 return x;
1387 /* MODE must occupy no more words than the mode of X. */
1388 if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
1389 > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
1390 return 0;
1392 /* Don't allow generating paradoxical FLOAT_MODE subregs. */
1393 if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
1394 return 0;
1396 offset = subreg_lowpart_offset (mode, innermode);
1398 if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
1399 && (GET_MODE_CLASS (mode) == MODE_INT
1400 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
1402 /* If we are getting the low-order part of something that has been
1403 sign- or zero-extended, we can either just use the object being
1404 extended or make a narrower extension. If we want an even smaller
1405 piece than the size of the object being extended, call ourselves
1406 recursively.
1408 This case is used mostly by combine and cse. */
1410 if (GET_MODE (XEXP (x, 0)) == mode)
1411 return XEXP (x, 0);
1412 else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
1413 return gen_lowpart_common (mode, XEXP (x, 0));
1414 else if (msize < xsize)
1415 return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
1417 else if (GET_CODE (x) == SUBREG || REG_P (x)
1418 || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
1419 || CONST_DOUBLE_AS_FLOAT_P (x) || CONST_SCALAR_INT_P (x))
1420 return simplify_gen_subreg (mode, x, innermode, offset);
1422 /* Otherwise, we can't do this. */
1423 return 0;
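/* Informal examples: gen_lowpart_common (SImode, (zero_extend:DI (reg:SI X)))
   returns (reg:SI X) directly, while asking for the SImode lowpart of
   (zero_extend:DI (reg:HI Y)) makes the narrower (zero_extend:SI (reg:HI Y));
   anything the routine cannot express comes back as 0.  */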
1427 gen_highpart (machine_mode mode, rtx x)
1429 unsigned int msize = GET_MODE_SIZE (mode);
1430 rtx result;
1432 /* This case loses if X is a subreg. To catch bugs early,
1433 complain if an invalid MODE is used even in other cases. */
1434 gcc_assert (msize <= UNITS_PER_WORD
1435 || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));
1437 result = simplify_gen_subreg (mode, x, GET_MODE (x),
1438 subreg_highpart_offset (mode, GET_MODE (x)));
1439 gcc_assert (result);
1441 /* simplify_gen_subreg is not guaranteed to return a valid operand for
1442 the target if we have a MEM. gen_highpart must return a valid operand,
1443 emitting code if necessary to do so. */
1444 if (MEM_P (result))
1446 result = validize_mem (result);
1447 gcc_assert (result);
1450 return result;
 1453 /* Like gen_highpart, but accept the mode of the EXP operand in case EXP
 1454 can be a VOIDmode constant. */
1456 gen_highpart_mode (machine_mode outermode, machine_mode innermode, rtx exp)
1458 if (GET_MODE (exp) != VOIDmode)
1460 gcc_assert (GET_MODE (exp) == innermode);
1461 return gen_highpart (outermode, exp);
1463 return simplify_gen_subreg (outermode, exp, innermode,
1464 subreg_highpart_offset (outermode, innermode));
1467 /* Return the SUBREG_BYTE for an OUTERMODE lowpart of an INNERMODE value. */
1469 unsigned int
1470 subreg_lowpart_offset (machine_mode outermode, machine_mode innermode)
1472 unsigned int offset = 0;
1473 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1475 if (difference > 0)
1477 if (WORDS_BIG_ENDIAN)
1478 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1479 if (BYTES_BIG_ENDIAN)
1480 offset += difference % UNITS_PER_WORD;
1483 return offset;
1486 /* Return offset in bytes to get OUTERMODE high part
1487 of the value in mode INNERMODE stored in memory in target format. */
1488 unsigned int
1489 subreg_highpart_offset (machine_mode outermode, machine_mode innermode)
1491 unsigned int offset = 0;
1492 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1494 gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));
1496 if (difference > 0)
1498 if (! WORDS_BIG_ENDIAN)
1499 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1500 if (! BYTES_BIG_ENDIAN)
1501 offset += difference % UNITS_PER_WORD;
1504 return offset;
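/* Worked example (assuming UNITS_PER_WORD == 4): for SImode within DImode,
   subreg_lowpart_offset (SImode, DImode) is 0 on a little-endian target and
   4 on a fully big-endian one, while subreg_highpart_offset (SImode, DImode)
   is 4 and 0 respectively.  */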
1507 /* Return 1 iff X, assumed to be a SUBREG,
1508 refers to the least significant part of its containing reg.
1509 If X is not a SUBREG, always return 1 (it is its own low part!). */
1512 subreg_lowpart_p (const_rtx x)
1514 if (GET_CODE (x) != SUBREG)
1515 return 1;
1516 else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
1517 return 0;
1519 return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
1520 == SUBREG_BYTE (x));
1523 /* Return true if X is a paradoxical subreg, false otherwise. */
1524 bool
1525 paradoxical_subreg_p (const_rtx x)
1527 if (GET_CODE (x) != SUBREG)
1528 return false;
1529 return (GET_MODE_PRECISION (GET_MODE (x))
1530 > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (x))));
1533 /* Return subword OFFSET of operand OP.
1534 The word number, OFFSET, is interpreted as the word number starting
1535 at the low-order address. OFFSET 0 is the low-order word if not
1536 WORDS_BIG_ENDIAN, otherwise it is the high-order word.
1538 If we cannot extract the required word, we return zero. Otherwise,
1539 an rtx corresponding to the requested word will be returned.
1541 VALIDATE_ADDRESS is nonzero if the address should be validated. Before
1542 reload has completed, a valid address will always be returned. After
1543 reload, if a valid address cannot be returned, we return zero.
1545 If VALIDATE_ADDRESS is zero, we simply form the required address; validating
1546 it is the responsibility of the caller.
1548 MODE is the mode of OP in case it is a CONST_INT.
1550 ??? This is still rather broken for some cases. The problem for the
1551 moment is that all callers of this thing provide no 'goal mode' to
1552 tell us to work with. This exists because all callers were written
1553 in a word based SUBREG world.
1554 Now use of this function can be deprecated by simplify_subreg in most
1555 cases.
1559 operand_subword (rtx op, unsigned int offset, int validate_address, machine_mode mode)
1561 if (mode == VOIDmode)
1562 mode = GET_MODE (op);
1564 gcc_assert (mode != VOIDmode);
1566 /* If OP is narrower than a word, fail. */
1567 if (mode != BLKmode
1568 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
1569 return 0;
1571 /* If we want a word outside OP, return zero. */
1572 if (mode != BLKmode
1573 && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
1574 return const0_rtx;
1576 /* Form a new MEM at the requested address. */
1577 if (MEM_P (op))
1579 rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);
1581 if (! validate_address)
1582 return new_rtx;
1584 else if (reload_completed)
1586 if (! strict_memory_address_addr_space_p (word_mode,
1587 XEXP (new_rtx, 0),
1588 MEM_ADDR_SPACE (op)))
1589 return 0;
1591 else
1592 return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
1595 /* Rest can be handled by simplify_subreg. */
1596 return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
1599 /* Similar to `operand_subword', but never return 0. If we can't
1600 extract the required subword, put OP into a register and try again.
1601 The second attempt must succeed. We always validate the address in
1602 this case.
1604 MODE is the mode of OP, in case it is CONST_INT. */
1607 operand_subword_force (rtx op, unsigned int offset, machine_mode mode)
1609 rtx result = operand_subword (op, offset, 1, mode);
1611 if (result)
1612 return result;
1614 if (mode != BLKmode && mode != VOIDmode)
 1616 /* If this is a register which cannot be accessed by words, copy it
1617 to a pseudo register. */
1618 if (REG_P (op))
1619 op = copy_to_reg (op);
1620 else
1621 op = force_reg (mode, op);
1624 result = operand_subword (op, offset, 1, mode);
1625 gcc_assert (result);
1627 return result;
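/* Informal example: on a 32-bit target, operand_subword_force (op, 1, DImode)
   yields a word_mode (SImode) rtx for one half of OP (which half depends on
   WORDS_BIG_ENDIAN), copying OP into a register first if the requested word
   cannot be extracted directly.  */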
 1630 /* Returns 1 if both MEM_EXPRs can be considered equal,
 1631 and 0 otherwise. */
1634 mem_expr_equal_p (const_tree expr1, const_tree expr2)
1636 if (expr1 == expr2)
1637 return 1;
1639 if (! expr1 || ! expr2)
1640 return 0;
1642 if (TREE_CODE (expr1) != TREE_CODE (expr2))
1643 return 0;
1645 return operand_equal_p (expr1, expr2, 0);
1648 /* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
1649 bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
1650 -1 if not known. */
1653 get_mem_align_offset (rtx mem, unsigned int align)
1655 tree expr;
1656 unsigned HOST_WIDE_INT offset;
1658 /* This function can't use
1659 if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem)
1660 || (MAX (MEM_ALIGN (mem),
1661 MAX (align, get_object_alignment (MEM_EXPR (mem))))
1662 < align))
1663 return -1;
1664 else
1665 return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1);
1666 for two reasons:
1667 - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
1668 for <variable>. get_inner_reference doesn't handle it and
1669 even if it did, the alignment in that case needs to be determined
1670 from DECL_FIELD_CONTEXT's TYPE_ALIGN.
1671 - it would do suboptimal job for COMPONENT_REFs, even if MEM_EXPR
1672 isn't sufficiently aligned, the object it is in might be. */
1673 gcc_assert (MEM_P (mem));
1674 expr = MEM_EXPR (mem);
1675 if (expr == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
1676 return -1;
1678 offset = MEM_OFFSET (mem);
1679 if (DECL_P (expr))
1681 if (DECL_ALIGN (expr) < align)
1682 return -1;
1684 else if (INDIRECT_REF_P (expr))
1686 if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
1687 return -1;
1689 else if (TREE_CODE (expr) == COMPONENT_REF)
1691 while (1)
1693 tree inner = TREE_OPERAND (expr, 0);
1694 tree field = TREE_OPERAND (expr, 1);
1695 tree byte_offset = component_ref_field_offset (expr);
1696 tree bit_offset = DECL_FIELD_BIT_OFFSET (field);
1698 if (!byte_offset
1699 || !tree_fits_uhwi_p (byte_offset)
1700 || !tree_fits_uhwi_p (bit_offset))
1701 return -1;
1703 offset += tree_to_uhwi (byte_offset);
1704 offset += tree_to_uhwi (bit_offset) / BITS_PER_UNIT;
1706 if (inner == NULL_TREE)
1708 if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
1709 < (unsigned int) align)
1710 return -1;
1711 break;
1713 else if (DECL_P (inner))
1715 if (DECL_ALIGN (inner) < align)
1716 return -1;
1717 break;
1719 else if (TREE_CODE (inner) != COMPONENT_REF)
1720 return -1;
1721 expr = inner;
1724 else
1725 return -1;
1727 return offset & ((align / BITS_PER_UNIT) - 1);
1730 /* Given REF (a MEM) and T, either the type of X or the expression
1731 corresponding to REF, set the memory attributes. OBJECTP is nonzero
1732 if we are making a new object of this type. BITPOS is nonzero if
1733 there is an offset outstanding on T that will be applied later. */
1735 void
1736 set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
1737 HOST_WIDE_INT bitpos)
1739 HOST_WIDE_INT apply_bitpos = 0;
1740 tree type;
1741 struct mem_attrs attrs, *defattrs, *refattrs;
1742 addr_space_t as;
1744 /* It can happen that type_for_mode was given a mode for which there
 1745 is no language-level type, in which case it returns NULL, which
1746 we can see here. */
1747 if (t == NULL_TREE)
1748 return;
1750 type = TYPE_P (t) ? t : TREE_TYPE (t);
1751 if (type == error_mark_node)
1752 return;
1754 /* If we have already set DECL_RTL = ref, get_alias_set will get the
1755 wrong answer, as it assumes that DECL_RTL already has the right alias
1756 info. Callers should not set DECL_RTL until after the call to
1757 set_mem_attributes. */
1758 gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));
1760 memset (&attrs, 0, sizeof (attrs));
1762 /* Get the alias set from the expression or type (perhaps using a
1763 front-end routine) and use it. */
1764 attrs.alias = get_alias_set (t);
1766 MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
1767 MEM_POINTER (ref) = POINTER_TYPE_P (type);
1769 /* Default values from pre-existing memory attributes if present. */
1770 refattrs = MEM_ATTRS (ref);
1771 if (refattrs)
1773 /* ??? Can this ever happen? Calling this routine on a MEM that
1774 already carries memory attributes should probably be invalid. */
1775 attrs.expr = refattrs->expr;
1776 attrs.offset_known_p = refattrs->offset_known_p;
1777 attrs.offset = refattrs->offset;
1778 attrs.size_known_p = refattrs->size_known_p;
1779 attrs.size = refattrs->size;
1780 attrs.align = refattrs->align;
1783 /* Otherwise, default values from the mode of the MEM reference. */
1784 else
1786 defattrs = mode_mem_attrs[(int) GET_MODE (ref)];
1787 gcc_assert (!defattrs->expr);
1788 gcc_assert (!defattrs->offset_known_p);
1790 /* Respect mode size. */
1791 attrs.size_known_p = defattrs->size_known_p;
1792 attrs.size = defattrs->size;
1793 /* ??? Is this really necessary? We probably should always get
1794 the size from the type below. */
1796 /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
1797 if T is an object, always compute the object alignment below. */
1798 if (TYPE_P (t))
1799 attrs.align = defattrs->align;
1800 else
1801 attrs.align = BITS_PER_UNIT;
1802 /* ??? If T is a type, respecting mode alignment may *also* be wrong
1803 e.g. if the type carries an alignment attribute. Should we be
1804 able to simply always use TYPE_ALIGN? */
1807 /* We can set the alignment from the type if we are making an object,
1808 this is an INDIRECT_REF, or if TYPE_ALIGN_OK. */
1809 if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
1810 attrs.align = MAX (attrs.align, TYPE_ALIGN (type));
1812 /* If the size is known, we can set that. */
1813 tree new_size = TYPE_SIZE_UNIT (type);
1815 /* The address-space is that of the type. */
1816 as = TYPE_ADDR_SPACE (type);
1818 /* If T is not a type, we may be able to deduce some more information about
1819 the expression. */
1820 if (! TYPE_P (t))
1822 tree base;
1824 if (TREE_THIS_VOLATILE (t))
1825 MEM_VOLATILE_P (ref) = 1;
1827 /* Now remove any conversions: they don't change what the underlying
1828 object is. Likewise for SAVE_EXPR. */
1829 while (CONVERT_EXPR_P (t)
1830 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1831 || TREE_CODE (t) == SAVE_EXPR)
1832 t = TREE_OPERAND (t, 0);
1834 /* Note whether this expression can trap. */
1835 MEM_NOTRAP_P (ref) = !tree_could_trap_p (t);
1837 base = get_base_address (t);
1838 if (base)
1840 if (DECL_P (base)
1841 && TREE_READONLY (base)
1842 && (TREE_STATIC (base) || DECL_EXTERNAL (base))
1843 && !TREE_THIS_VOLATILE (base))
1844 MEM_READONLY_P (ref) = 1;
1846 /* Mark static const strings readonly as well. */
1847 if (TREE_CODE (base) == STRING_CST
1848 && TREE_READONLY (base)
1849 && TREE_STATIC (base))
1850 MEM_READONLY_P (ref) = 1;
1852 /* Address-space information is on the base object. */
1853 if (TREE_CODE (base) == MEM_REF
1854 || TREE_CODE (base) == TARGET_MEM_REF)
1855 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (base,
1856 0))));
1857 else
1858 as = TYPE_ADDR_SPACE (TREE_TYPE (base));
 1861 /* If this expression uses its parent's alias set, mark it such
1862 that we won't change it. */
1863 if (component_uses_parent_alias_set_from (t) != NULL_TREE)
1864 MEM_KEEP_ALIAS_SET_P (ref) = 1;
1866 /* If this is a decl, set the attributes of the MEM from it. */
1867 if (DECL_P (t))
1869 attrs.expr = t;
1870 attrs.offset_known_p = true;
1871 attrs.offset = 0;
1872 apply_bitpos = bitpos;
1873 new_size = DECL_SIZE_UNIT (t);
1876 /* ??? If we end up with a constant here do record a MEM_EXPR. */
1877 else if (CONSTANT_CLASS_P (t))
1880 /* If this is a field reference, record it. */
1881 else if (TREE_CODE (t) == COMPONENT_REF)
1883 attrs.expr = t;
1884 attrs.offset_known_p = true;
1885 attrs.offset = 0;
1886 apply_bitpos = bitpos;
1887 if (DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
1888 new_size = DECL_SIZE_UNIT (TREE_OPERAND (t, 1));
1891 /* If this is an array reference, look for an outer field reference. */
1892 else if (TREE_CODE (t) == ARRAY_REF)
1894 tree off_tree = size_zero_node;
1895 /* We can't modify t, because we use it at the end of the
1896 function. */
1897 tree t2 = t;
1901 tree index = TREE_OPERAND (t2, 1);
1902 tree low_bound = array_ref_low_bound (t2);
1903 tree unit_size = array_ref_element_size (t2);
1905 /* We assume all arrays have sizes that are a multiple of a byte.
1906 First subtract the lower bound, if any, in the type of the
1907 index, then convert to sizetype and multiply by the size of
1908 the array element. */
1909 if (! integer_zerop (low_bound))
1910 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
1911 index, low_bound);
1913 off_tree = size_binop (PLUS_EXPR,
1914 size_binop (MULT_EXPR,
1915 fold_convert (sizetype,
1916 index),
1917 unit_size),
1918 off_tree);
1919 t2 = TREE_OPERAND (t2, 0);
1921 while (TREE_CODE (t2) == ARRAY_REF);
1923 if (DECL_P (t2)
1924 || TREE_CODE (t2) == COMPONENT_REF)
1926 attrs.expr = t2;
1927 attrs.offset_known_p = false;
1928 if (tree_fits_uhwi_p (off_tree))
1930 attrs.offset_known_p = true;
1931 attrs.offset = tree_to_uhwi (off_tree);
1932 apply_bitpos = bitpos;
1935 /* Else do not record a MEM_EXPR. */
1938 /* If this is an indirect reference, record it. */
1939 else if (TREE_CODE (t) == MEM_REF
1940 || TREE_CODE (t) == TARGET_MEM_REF)
1942 attrs.expr = t;
1943 attrs.offset_known_p = true;
1944 attrs.offset = 0;
1945 apply_bitpos = bitpos;
1948 /* Compute the alignment. */
1949 unsigned int obj_align;
1950 unsigned HOST_WIDE_INT obj_bitpos;
1951 get_object_alignment_1 (t, &obj_align, &obj_bitpos);
1952 obj_bitpos = (obj_bitpos - bitpos) & (obj_align - 1);
1953 if (obj_bitpos != 0)
1954 obj_align = (obj_bitpos & -obj_bitpos);
1955 attrs.align = MAX (attrs.align, obj_align);
1958 if (tree_fits_uhwi_p (new_size))
1960 attrs.size_known_p = true;
1961 attrs.size = tree_to_uhwi (new_size);
1964 /* If we modified OFFSET based on T, then subtract the outstanding
1965 bit position offset. Similarly, increase the size of the accessed
1966 object to contain the negative offset. */
1967 if (apply_bitpos)
1969 gcc_assert (attrs.offset_known_p);
1970 attrs.offset -= apply_bitpos / BITS_PER_UNIT;
1971 if (attrs.size_known_p)
1972 attrs.size += apply_bitpos / BITS_PER_UNIT;
1975 /* Now set the attributes we computed above. */
1976 attrs.addrspace = as;
1977 set_mem_attrs (ref, &attrs);
1980 void
1981 set_mem_attributes (rtx ref, tree t, int objectp)
1983 set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
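/* Usage sketch for set_mem_attributes: a caller that has just built a MEM
   for a declaration typically attaches the tree attributes like so (DECL
   is a hypothetical VAR_DECL whose DECL_RTL is already a MEM):

       rtx mem = gen_rtx_MEM (DECL_MODE (decl), XEXP (DECL_RTL (decl), 0));
       set_mem_attributes (mem, decl, 1);

   Afterwards MEM_EXPR, MEM_OFFSET, MEM_SIZE, MEM_ALIGN and the alias set
   of MEM are filled in from DECL as described above.  */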
1986 /* Set the alias set of MEM to SET. */
1988 void
1989 set_mem_alias_set (rtx mem, alias_set_type set)
1991 struct mem_attrs attrs;
1993 /* If the new and old alias sets don't conflict, something is wrong. */
1994 gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
1995 attrs = *get_mem_attrs (mem);
1996 attrs.alias = set;
1997 set_mem_attrs (mem, &attrs);
2000 /* Set the address space of MEM to ADDRSPACE (target-defined). */
2002 void
2003 set_mem_addr_space (rtx mem, addr_space_t addrspace)
2005 struct mem_attrs attrs;
2007 attrs = *get_mem_attrs (mem);
2008 attrs.addrspace = addrspace;
2009 set_mem_attrs (mem, &attrs);
2012 /* Set the alignment of MEM to ALIGN bits. */
2014 void
2015 set_mem_align (rtx mem, unsigned int align)
2017 struct mem_attrs attrs;
2019 attrs = *get_mem_attrs (mem);
2020 attrs.align = align;
2021 set_mem_attrs (mem, &attrs);
2024 /* Set the expr for MEM to EXPR. */
2026 void
2027 set_mem_expr (rtx mem, tree expr)
2029 struct mem_attrs attrs;
2031 attrs = *get_mem_attrs (mem);
2032 attrs.expr = expr;
2033 set_mem_attrs (mem, &attrs);
2036 /* Set the offset of MEM to OFFSET. */
2038 void
2039 set_mem_offset (rtx mem, HOST_WIDE_INT offset)
2041 struct mem_attrs attrs;
2043 attrs = *get_mem_attrs (mem);
2044 attrs.offset_known_p = true;
2045 attrs.offset = offset;
2046 set_mem_attrs (mem, &attrs);
2049 /* Clear the offset of MEM. */
2051 void
2052 clear_mem_offset (rtx mem)
2054 struct mem_attrs attrs;
2056 attrs = *get_mem_attrs (mem);
2057 attrs.offset_known_p = false;
2058 set_mem_attrs (mem, &attrs);
2061 /* Set the size of MEM to SIZE. */
2063 void
2064 set_mem_size (rtx mem, HOST_WIDE_INT size)
2066 struct mem_attrs attrs;
2068 attrs = *get_mem_attrs (mem);
2069 attrs.size_known_p = true;
2070 attrs.size = size;
2071 set_mem_attrs (mem, &attrs);
2074 /* Clear the size of MEM. */
2076 void
2077 clear_mem_size (rtx mem)
2079 struct mem_attrs attrs;
2081 attrs = *get_mem_attrs (mem);
2082 attrs.size_known_p = false;
2083 set_mem_attrs (mem, &attrs);
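/* Usage sketch for the accessors above (MEM is assumed to be any valid
   MEM rtx; the values are examples only):

       set_mem_align (mem, 32);
       set_mem_size (mem, 4);
       set_mem_offset (mem, 0);
       set_mem_addr_space (mem, ADDR_SPACE_GENERIC);

   Alignment is given in bits while size and offset are in bytes; each
   call copies the current mem_attrs, changes one field and reinstalls
   the block via set_mem_attrs.  */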
2086 /* Return a memory reference like MEMREF, but with its mode changed to MODE
2087 and its address changed to ADDR. (VOIDmode means don't change the mode.
2088 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
2089 returned memory location is required to be valid. INPLACE is true if any
2090 changes can be made directly to MEMREF or false if MEMREF must be treated
2091 as immutable.
2093 The memory attributes are not changed. */
2095 static rtx
2096 change_address_1 (rtx memref, machine_mode mode, rtx addr, int validate,
2097 bool inplace)
2099 addr_space_t as;
2100 rtx new_rtx;
2102 gcc_assert (MEM_P (memref));
2103 as = MEM_ADDR_SPACE (memref);
2104 if (mode == VOIDmode)
2105 mode = GET_MODE (memref);
2106 if (addr == 0)
2107 addr = XEXP (memref, 0);
2108 if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
2109 && (!validate || memory_address_addr_space_p (mode, addr, as)))
2110 return memref;
2112 /* Don't validate address for LRA. LRA can make the address valid
2113 by itself in the most efficient way. */
2114 if (validate && !lra_in_progress)
2116 if (reload_in_progress || reload_completed)
2117 gcc_assert (memory_address_addr_space_p (mode, addr, as));
2118 else
2119 addr = memory_address_addr_space (mode, addr, as);
2122 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
2123 return memref;
2125 if (inplace)
2127 XEXP (memref, 0) = addr;
2128 return memref;
2131 new_rtx = gen_rtx_MEM (mode, addr);
2132 MEM_COPY_ATTRIBUTES (new_rtx, memref);
2133 return new_rtx;
2136 /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
2137 way we are changing MEMREF, so we only preserve the alias set. */
2140 change_address (rtx memref, machine_mode mode, rtx addr)
2142 rtx new_rtx = change_address_1 (memref, mode, addr, 1, false);
2143 machine_mode mmode = GET_MODE (new_rtx);
2144 struct mem_attrs attrs, *defattrs;
2146 attrs = *get_mem_attrs (memref);
2147 defattrs = mode_mem_attrs[(int) mmode];
2148 attrs.expr = NULL_TREE;
2149 attrs.offset_known_p = false;
2150 attrs.size_known_p = defattrs->size_known_p;
2151 attrs.size = defattrs->size;
2152 attrs.align = defattrs->align;
2154 /* If there are no changes, just return the original memory reference. */
2155 if (new_rtx == memref)
2157 if (mem_attrs_eq_p (get_mem_attrs (memref), &attrs))
2158 return new_rtx;
2160 new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
2161 MEM_COPY_ATTRIBUTES (new_rtx, memref);
2164 set_mem_attrs (new_rtx, &attrs);
2165 return new_rtx;
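/* Usage sketch: rebasing a block-mode reference on a freshly computed
   address (ADDR_REG is a hypothetical register already holding a valid
   address in MEM's address space):

       rtx blk = change_address (mem, BLKmode, addr_reg);

   As documented above, only the alias set is preserved from MEM; expr,
   offset, size and alignment are reset to the defaults of the new mode.  */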
2168 /* Return a memory reference like MEMREF, but with its mode changed
2169 to MODE and its address offset by OFFSET bytes. If VALIDATE is
2170 nonzero, the memory address is forced to be valid.
2171 If ADJUST_ADDRESS is zero, OFFSET is only used to update MEM_ATTRS
2172 and the caller is responsible for adjusting MEMREF base register.
2173 If ADJUST_OBJECT is zero, the underlying object associated with the
2174 memory reference is left unchanged and the caller is responsible for
2175 dealing with it. Otherwise, if the new memory reference is outside
2176 the underlying object, even partially, then the object is dropped.
2177 SIZE, if nonzero, is the size of an access in cases where MODE
2178 has no inherent size. */
2181 adjust_address_1 (rtx memref, machine_mode mode, HOST_WIDE_INT offset,
2182 int validate, int adjust_address, int adjust_object,
2183 HOST_WIDE_INT size)
2185 rtx addr = XEXP (memref, 0);
2186 rtx new_rtx;
2187 machine_mode address_mode;
2188 int pbits;
2189 struct mem_attrs attrs = *get_mem_attrs (memref), *defattrs;
2190 unsigned HOST_WIDE_INT max_align;
2191 #ifdef POINTERS_EXTEND_UNSIGNED
2192 machine_mode pointer_mode
2193 = targetm.addr_space.pointer_mode (attrs.addrspace);
2194 #endif
2196 /* VOIDmode means no mode change for change_address_1. */
2197 if (mode == VOIDmode)
2198 mode = GET_MODE (memref);
2200 /* Take the size of non-BLKmode accesses from the mode. */
2201 defattrs = mode_mem_attrs[(int) mode];
2202 if (defattrs->size_known_p)
2203 size = defattrs->size;
2205 /* If there are no changes, just return the original memory reference. */
2206 if (mode == GET_MODE (memref) && !offset
2207 && (size == 0 || (attrs.size_known_p && attrs.size == size))
2208 && (!validate || memory_address_addr_space_p (mode, addr,
2209 attrs.addrspace)))
2210 return memref;
2212 /* ??? Prefer to create garbage instead of creating shared rtl.
2213 This may happen even if offset is nonzero -- consider
2214 (plus (plus reg reg) const_int) -- so do this always. */
2215 addr = copy_rtx (addr);
2217 /* Convert a possibly large offset to a signed value within the
2218 range of the target address space. */
2219 address_mode = get_address_mode (memref);
2220 pbits = GET_MODE_BITSIZE (address_mode);
2221 if (HOST_BITS_PER_WIDE_INT > pbits)
2223 int shift = HOST_BITS_PER_WIDE_INT - pbits;
2224 offset = (((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) offset << shift))
2225 >> shift);
2228 if (adjust_address)
2230 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
2231 object, we can merge it into the LO_SUM. */
2232 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
2233 && offset >= 0
2234 && (unsigned HOST_WIDE_INT) offset
2235 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
2236 addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
2237 plus_constant (address_mode,
2238 XEXP (addr, 1), offset));
2239 #ifdef POINTERS_EXTEND_UNSIGNED
2240 /* If MEMREF is a ZERO_EXTEND from pointer_mode and the offset is valid
2241 in that mode, we merge it into the ZERO_EXTEND. We take advantage of
2242 the fact that pointers are not allowed to overflow. */
2243 else if (POINTERS_EXTEND_UNSIGNED > 0
2244 && GET_CODE (addr) == ZERO_EXTEND
2245 && GET_MODE (XEXP (addr, 0)) == pointer_mode
2246 && trunc_int_for_mode (offset, pointer_mode) == offset)
2247 addr = gen_rtx_ZERO_EXTEND (address_mode,
2248 plus_constant (pointer_mode,
2249 XEXP (addr, 0), offset));
2250 #endif
2251 else
2252 addr = plus_constant (address_mode, addr, offset);
2255 new_rtx = change_address_1 (memref, mode, addr, validate, false);
2257 /* If the address is a REG, change_address_1 rightfully returns memref,
2258 but this would destroy memref's MEM_ATTRS. */
2259 if (new_rtx == memref && offset != 0)
2260 new_rtx = copy_rtx (new_rtx);
2262 /* Conservatively drop the object if we don't know where we start from. */
2263 if (adjust_object && (!attrs.offset_known_p || !attrs.size_known_p))
2265 attrs.expr = NULL_TREE;
2266 attrs.alias = 0;
2269 /* Compute the new values of the memory attributes due to this adjustment.
2270 We add the offsets and update the alignment. */
2271 if (attrs.offset_known_p)
2273 attrs.offset += offset;
2275 /* Drop the object if the new left end is not within its bounds. */
2276 if (adjust_object && attrs.offset < 0)
2278 attrs.expr = NULL_TREE;
2279 attrs.alias = 0;
2283 /* Compute the new alignment by taking the MIN of the alignment and the
2284 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
2285 is zero. */
2286 if (offset != 0)
2288 max_align = (offset & -offset) * BITS_PER_UNIT;
2289 attrs.align = MIN (attrs.align, max_align);
2292 if (size)
2294 /* Drop the object if the new right end is not within its bounds. */
2295 if (adjust_object && (offset + size) > attrs.size)
2297 attrs.expr = NULL_TREE;
2298 attrs.alias = 0;
2300 attrs.size_known_p = true;
2301 attrs.size = size;
2303 else if (attrs.size_known_p)
2305 gcc_assert (!adjust_object);
2306 attrs.size -= offset;
2307 /* ??? The store_by_pieces machinery generates negative sizes,
2308 so don't assert for that here. */
2311 set_mem_attrs (new_rtx, &attrs);
2313 return new_rtx;
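/* Usage sketch: the usual entry points are the adjust_address and
   adjust_address_nv macros, which wrap this function.  For example, a
   target splitting a DImode memory operand might access its second
   SImode word with

       rtx hi = adjust_address (operands[1], SImode, 4);

   where OPERANDS[1] is assumed to be a DImode MEM; the offset is in
   bytes and the returned MEM has offset, size and alignment updated as
   described above.  */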
2316 /* Return a memory reference like MEMREF, but with its mode changed
2317 to MODE and its address changed to ADDR, which is assumed to be
2318 MEMREF offset by OFFSET bytes. If VALIDATE is
2319 nonzero, the memory address is forced to be valid. */
2322 adjust_automodify_address_1 (rtx memref, machine_mode mode, rtx addr,
2323 HOST_WIDE_INT offset, int validate)
2325 memref = change_address_1 (memref, VOIDmode, addr, validate, false);
2326 return adjust_address_1 (memref, mode, offset, validate, 0, 0, 0);
2329 /* Return a memory reference like MEMREF, but whose address is changed by
2330 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2331 known to be in OFFSET (possibly 1). */
2334 offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
2336 rtx new_rtx, addr = XEXP (memref, 0);
2337 machine_mode address_mode;
2338 struct mem_attrs attrs, *defattrs;
2340 attrs = *get_mem_attrs (memref);
2341 address_mode = get_address_mode (memref);
2342 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2344 /* At this point we don't know _why_ the address is invalid. It
2345 could have secondary memory references, multiplies or anything.
2347 However, if we did go and rearrange things, we can wind up not
2348 being able to recognize the magic around pic_offset_table_rtx.
2349 This stuff is fragile, and is yet another example of why it is
2350 bad to expose PIC machinery too early. */
2351 if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx,
2352 attrs.addrspace)
2353 && GET_CODE (addr) == PLUS
2354 && XEXP (addr, 0) == pic_offset_table_rtx)
2356 addr = force_reg (GET_MODE (addr), addr);
2357 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2360 update_temp_slot_address (XEXP (memref, 0), new_rtx);
2361 new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1, false);
2363 /* If there are no changes, just return the original memory reference. */
2364 if (new_rtx == memref)
2365 return new_rtx;
2367 /* Update the alignment to reflect the offset. Reset the offset, which
2368 we don't know. */
2369 defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
2370 attrs.offset_known_p = false;
2371 attrs.size_known_p = defattrs->size_known_p;
2372 attrs.size = defattrs->size;
2373 attrs.align = MIN (attrs.align, pow2 * BITS_PER_UNIT);
2374 set_mem_attrs (new_rtx, &attrs);
2375 return new_rtx;
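/* Usage sketch: adding a run-time byte offset that is known to be a
   multiple of 4 (IDX_REG is a hypothetical register in MEM's address
   mode):

       rtx elt = offset_address (mem, idx_reg, 4);

   The resulting alignment is capped at 4 * BITS_PER_UNIT and the offset
   becomes unknown, as the code above shows.  */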
2378 /* Return a memory reference like MEMREF, but with its address changed to
2379 ADDR. The caller is asserting that the actual piece of memory pointed
2380 to is the same, just the form of the address is being changed, such as
2381 by putting something into a register. INPLACE is true if any changes
2382 can be made directly to MEMREF or false if MEMREF must be treated as
2383 immutable. */
2386 replace_equiv_address (rtx memref, rtx addr, bool inplace)
2388 /* change_address_1 copies the memory attribute structure without change
2389 and that's exactly what we want here. */
2390 update_temp_slot_address (XEXP (memref, 0), addr);
2391 return change_address_1 (memref, VOIDmode, addr, 1, inplace);
2394 /* Likewise, but the reference is not required to be valid. */
2397 replace_equiv_address_nv (rtx memref, rtx addr, bool inplace)
2399 return change_address_1 (memref, VOIDmode, addr, 0, inplace);
2402 /* Return a memory reference like MEMREF, but with its mode widened to
2403 MODE and offset by OFFSET. This would be used by targets that e.g.
2404 cannot issue QImode memory operations and have to use SImode memory
2405 operations plus masking logic. */
2408 widen_memory_access (rtx memref, machine_mode mode, HOST_WIDE_INT offset)
2410 rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1, 0, 0);
2411 struct mem_attrs attrs;
2412 unsigned int size = GET_MODE_SIZE (mode);
2414 /* If there are no changes, just return the original memory reference. */
2415 if (new_rtx == memref)
2416 return new_rtx;
2418 attrs = *get_mem_attrs (new_rtx);
2420 /* If we don't know what offset we were at within the expression, then
2421 we can't know if we've overstepped the bounds. */
2422 if (! attrs.offset_known_p)
2423 attrs.expr = NULL_TREE;
2425 while (attrs.expr)
2427 if (TREE_CODE (attrs.expr) == COMPONENT_REF)
2429 tree field = TREE_OPERAND (attrs.expr, 1);
2430 tree offset = component_ref_field_offset (attrs.expr);
2432 if (! DECL_SIZE_UNIT (field))
2434 attrs.expr = NULL_TREE;
2435 break;
2438 /* Is the field at least as large as the access? If so, ok,
2439 otherwise strip back to the containing structure. */
2440 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2441 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
2442 && attrs.offset >= 0)
2443 break;
2445 if (! tree_fits_uhwi_p (offset))
2447 attrs.expr = NULL_TREE;
2448 break;
2451 attrs.expr = TREE_OPERAND (attrs.expr, 0);
2452 attrs.offset += tree_to_uhwi (offset);
2453 attrs.offset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
2454 / BITS_PER_UNIT);
2456 /* Similarly for the decl. */
2457 else if (DECL_P (attrs.expr)
2458 && DECL_SIZE_UNIT (attrs.expr)
2459 && TREE_CODE (DECL_SIZE_UNIT (attrs.expr)) == INTEGER_CST
2460 && compare_tree_int (DECL_SIZE_UNIT (attrs.expr), size) >= 0
2461 && (! attrs.offset_known_p || attrs.offset >= 0))
2462 break;
2463 else
2465 /* The widened memory access overflows the expression, which means
2466 that it could alias another expression. Zap it. */
2467 attrs.expr = NULL_TREE;
2468 break;
2472 if (! attrs.expr)
2473 attrs.offset_known_p = false;
2475 /* The widened memory may alias other stuff, so zap the alias set. */
2476 /* ??? Maybe use get_alias_set on any remaining expression. */
2477 attrs.alias = 0;
2478 attrs.size_known_p = true;
2479 attrs.size = size;
2480 set_mem_attrs (new_rtx, &attrs);
2481 return new_rtx;
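/* Usage sketch: a target that cannot issue byte loads might widen a
   QImode reference to a full word (MEM is assumed to be a QImode MEM at
   a word-aligned address):

       rtx wide = widen_memory_access (mem, SImode, 0);

   The widened reference gets size GET_MODE_SIZE (SImode) and alias set
   0, and keeps MEM_EXPR only while the access stays inside the
   underlying object.  */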
2484 /* A fake decl that is used as the MEM_EXPR of spill slots. */
2485 static GTY(()) tree spill_slot_decl;
2487 tree
2488 get_spill_slot_decl (bool force_build_p)
2490 tree d = spill_slot_decl;
2491 rtx rd;
2492 struct mem_attrs attrs;
2494 if (d || !force_build_p)
2495 return d;
2497 d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
2498 VAR_DECL, get_identifier ("%sfp"), void_type_node);
2499 DECL_ARTIFICIAL (d) = 1;
2500 DECL_IGNORED_P (d) = 1;
2501 TREE_USED (d) = 1;
2502 spill_slot_decl = d;
2504 rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
2505 MEM_NOTRAP_P (rd) = 1;
2506 attrs = *mode_mem_attrs[(int) BLKmode];
2507 attrs.alias = new_alias_set ();
2508 attrs.expr = d;
2509 set_mem_attrs (rd, &attrs);
2510 SET_DECL_RTL (d, rd);
2512 return d;
2515 /* Given MEM, a result from assign_stack_local, fill in the memory
2516 attributes as appropriate for a register allocator spill slot.
2517 These slots are not aliasable by other memory. We arrange for
2518 them all to use a single MEM_EXPR, so that the aliasing code can
2519 work properly in the case of shared spill slots. */
2521 void
2522 set_mem_attrs_for_spill (rtx mem)
2524 struct mem_attrs attrs;
2525 rtx addr;
2527 attrs = *get_mem_attrs (mem);
2528 attrs.expr = get_spill_slot_decl (true);
2529 attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr));
2530 attrs.addrspace = ADDR_SPACE_GENERIC;
2532 /* We expect the incoming memory to be of the form:
2533 (mem:MODE (plus (reg sfp) (const_int offset)))
2534 with perhaps the plus missing for offset = 0. */
2535 addr = XEXP (mem, 0);
2536 attrs.offset_known_p = true;
2537 attrs.offset = 0;
2538 if (GET_CODE (addr) == PLUS
2539 && CONST_INT_P (XEXP (addr, 1)))
2540 attrs.offset = INTVAL (XEXP (addr, 1));
2542 set_mem_attrs (mem, &attrs);
2543 MEM_NOTRAP_P (mem) = 1;
2546 /* Return a newly created CODE_LABEL rtx with a unique label number. */
2548 rtx_code_label *
2549 gen_label_rtx (void)
2551 return as_a <rtx_code_label *> (
2552 gen_rtx_CODE_LABEL (VOIDmode, NULL_RTX, NULL_RTX,
2553 NULL, label_num++, NULL));
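/* Usage sketch: a label obtained here is placed into the insn stream
   with emit_label, e.g. when open-coding a small conditional skip (X and
   TARGET are hypothetical rtxes; emit_cmp_and_jump_insns and
   emit_move_insn live in other files):

       rtx_code_label *done = gen_label_rtx ();
       emit_cmp_and_jump_insns (x, const0_rtx, EQ, NULL_RTX,
                                GET_MODE (x), 0, done);
       emit_move_insn (target, const1_rtx);
       emit_label (done);

   The branch skips the move when X is zero.  */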
2556 /* For procedure integration. */
2558 /* Install new pointers to the first and last insns in the chain.
2559 Also, set cur_insn_uid to one higher than the last in use.
2560 Used for an inline-procedure after copying the insn chain. */
2562 void
2563 set_new_first_and_last_insn (rtx_insn *first, rtx_insn *last)
2565 rtx_insn *insn;
2567 set_first_insn (first);
2568 set_last_insn (last);
2569 cur_insn_uid = 0;
2571 if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS)
2573 int debug_count = 0;
2575 cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1;
2576 cur_debug_insn_uid = 0;
2578 for (insn = first; insn; insn = NEXT_INSN (insn))
2579 if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID)
2580 cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
2581 else
2583 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2584 if (DEBUG_INSN_P (insn))
2585 debug_count++;
2588 if (debug_count)
2589 cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count;
2590 else
2591 cur_debug_insn_uid++;
2593 else
2594 for (insn = first; insn; insn = NEXT_INSN (insn))
2595 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2597 cur_insn_uid++;
2600 /* Go through all the RTL insn bodies and copy any invalid shared
2601 structure. This routine should only be called once. */
2603 static void
2604 unshare_all_rtl_1 (rtx_insn *insn)
2606 /* Unshare just about everything else. */
2607 unshare_all_rtl_in_chain (insn);
2609 /* Make sure the addresses of stack slots found outside the insn chain
2610 (such as, in DECL_RTL of a variable) are not shared
2611 with the insn chain.
2613 This special care is necessary when the stack slot MEM does not
2614 actually appear in the insn chain. If it does appear, its address
2615 is unshared from all else at that point. */
2616 stack_slot_list = safe_as_a <rtx_expr_list *> (
2617 copy_rtx_if_shared (stack_slot_list));
2620 /* Go through all the RTL insn bodies and copy any invalid shared
2621 structure, again. This is a fairly expensive thing to do so it
2622 should be done sparingly. */
2624 void
2625 unshare_all_rtl_again (rtx_insn *insn)
2627 rtx_insn *p;
2628 tree decl;
2630 for (p = insn; p; p = NEXT_INSN (p))
2631 if (INSN_P (p))
2633 reset_used_flags (PATTERN (p));
2634 reset_used_flags (REG_NOTES (p));
2635 if (CALL_P (p))
2636 reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
2639 /* Make sure that virtual stack slots are not shared. */
2640 set_used_decls (DECL_INITIAL (cfun->decl));
2642 /* Make sure that virtual parameters are not shared. */
2643 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
2644 set_used_flags (DECL_RTL (decl));
2646 reset_used_flags (stack_slot_list);
2648 unshare_all_rtl_1 (insn);
2651 unsigned int
2652 unshare_all_rtl (void)
2654 unshare_all_rtl_1 (get_insns ());
2655 return 0;
2659 /* Check that ORIG is not marked when it should not be and mark ORIG as in use.
2660 Recursively does the same for subexpressions. */
2662 static void
2663 verify_rtx_sharing (rtx orig, rtx insn)
2665 rtx x = orig;
2666 int i;
2667 enum rtx_code code;
2668 const char *format_ptr;
2670 if (x == 0)
2671 return;
2673 code = GET_CODE (x);
2675 /* These types may be freely shared. */
2677 switch (code)
2679 case REG:
2680 case DEBUG_EXPR:
2681 case VALUE:
2682 CASE_CONST_ANY:
2683 case SYMBOL_REF:
2684 case LABEL_REF:
2685 case CODE_LABEL:
2686 case PC:
2687 case CC0:
2688 case RETURN:
2689 case SIMPLE_RETURN:
2690 case SCRATCH:
2691 /* SCRATCHes must be shared because they represent distinct values. */
2692 return;
2693 case CLOBBER:
2694 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
2695 clobbers or clobbers of hard registers that originated as pseudos.
2696 This is needed to allow safe register renaming. */
2697 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2698 && ORIGINAL_REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 0)))
2699 return;
2700 break;
2702 case CONST:
2703 if (shared_const_p (orig))
2704 return;
2705 break;
2707 case MEM:
2708 /* A MEM is allowed to be shared if its address is constant. */
2709 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2710 || reload_completed || reload_in_progress)
2711 return;
2713 break;
2715 default:
2716 break;
2719 /* This rtx may not be shared. If it has already been seen,
2720 report the invalid sharing. */
2721 #ifdef ENABLE_CHECKING
2722 if (RTX_FLAG (x, used))
2724 error ("invalid rtl sharing found in the insn");
2725 debug_rtx (insn);
2726 error ("shared rtx");
2727 debug_rtx (x);
2728 internal_error ("internal consistency failure");
2730 #endif
2731 gcc_assert (!RTX_FLAG (x, used));
2733 RTX_FLAG (x, used) = 1;
2735 /* Now scan the subexpressions recursively. */
2737 format_ptr = GET_RTX_FORMAT (code);
2739 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2741 switch (*format_ptr++)
2743 case 'e':
2744 verify_rtx_sharing (XEXP (x, i), insn);
2745 break;
2747 case 'E':
2748 if (XVEC (x, i) != NULL)
2750 int j;
2751 int len = XVECLEN (x, i);
2753 for (j = 0; j < len; j++)
2755 /* We allow sharing of ASM_OPERANDS inside a single
2756 instruction. */
2757 if (j && GET_CODE (XVECEXP (x, i, j)) == SET
2758 && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2759 == ASM_OPERANDS))
2760 verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2761 else
2762 verify_rtx_sharing (XVECEXP (x, i, j), insn);
2765 break;
2768 return;
2771 /* Reset used-flags for INSN. */
2773 static void
2774 reset_insn_used_flags (rtx insn)
2776 gcc_assert (INSN_P (insn));
2777 reset_used_flags (PATTERN (insn));
2778 reset_used_flags (REG_NOTES (insn));
2779 if (CALL_P (insn))
2780 reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn));
2783 /* Go through all the RTL insn bodies and clear all the USED bits. */
2785 static void
2786 reset_all_used_flags (void)
2788 rtx_insn *p;
2790 for (p = get_insns (); p; p = NEXT_INSN (p))
2791 if (INSN_P (p))
2793 rtx pat = PATTERN (p);
2794 if (GET_CODE (pat) != SEQUENCE)
2795 reset_insn_used_flags (p);
2796 else
2798 gcc_assert (REG_NOTES (p) == NULL);
2799 for (int i = 0; i < XVECLEN (pat, 0); i++)
2801 rtx insn = XVECEXP (pat, 0, i);
2802 if (INSN_P (insn))
2803 reset_insn_used_flags (insn);
2809 /* Verify sharing in INSN. */
2811 static void
2812 verify_insn_sharing (rtx insn)
2814 gcc_assert (INSN_P (insn));
2815 reset_used_flags (PATTERN (insn));
2816 reset_used_flags (REG_NOTES (insn));
2817 if (CALL_P (insn))
2818 reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn));
2821 /* Go through all the RTL insn bodies and check that there is no unexpected
2822 sharing in between the subexpressions. */
2824 DEBUG_FUNCTION void
2825 verify_rtl_sharing (void)
2827 rtx_insn *p;
2829 timevar_push (TV_VERIFY_RTL_SHARING);
2831 reset_all_used_flags ();
2833 for (p = get_insns (); p; p = NEXT_INSN (p))
2834 if (INSN_P (p))
2836 rtx pat = PATTERN (p);
2837 if (GET_CODE (pat) != SEQUENCE)
2838 verify_insn_sharing (p);
2839 else
2840 for (int i = 0; i < XVECLEN (pat, 0); i++)
2842 rtx insn = XVECEXP (pat, 0, i);
2843 if (INSN_P (insn))
2844 verify_insn_sharing (insn);
2848 reset_all_used_flags ();
2850 timevar_pop (TV_VERIFY_RTL_SHARING);
2853 /* Go through all the RTL insn bodies and copy any invalid shared structure.
2854 Assumes the mark bits are cleared at entry. */
2856 void
2857 unshare_all_rtl_in_chain (rtx_insn *insn)
2859 for (; insn; insn = NEXT_INSN (insn))
2860 if (INSN_P (insn))
2862 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2863 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2864 if (CALL_P (insn))
2865 CALL_INSN_FUNCTION_USAGE (insn)
2866 = copy_rtx_if_shared (CALL_INSN_FUNCTION_USAGE (insn));
2870 /* Go through all virtual stack slots of a function and mark them as
2871 shared. We never replace the DECL_RTLs themselves with a copy,
2872 but expressions mentioned into a DECL_RTL cannot be shared with
2873 expressions in the instruction stream.
2875 Note that reload may convert pseudo registers into memories in-place.
2876 Pseudo registers are always shared, but MEMs never are. Thus if we
2877 reset the used flags on MEMs in the instruction stream, we must set
2878 them again on MEMs that appear in DECL_RTLs. */
2880 static void
2881 set_used_decls (tree blk)
2883 tree t;
2885 /* Mark decls. */
2886 for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
2887 if (DECL_RTL_SET_P (t))
2888 set_used_flags (DECL_RTL (t));
2890 /* Now process sub-blocks. */
2891 for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
2892 set_used_decls (t);
2895 /* Mark ORIG as in use, and return a copy of it if it was already in use.
2896 Recursively does the same for subexpressions. Uses
2897 copy_rtx_if_shared_1 to reduce stack space. */
2900 copy_rtx_if_shared (rtx orig)
2902 copy_rtx_if_shared_1 (&orig);
2903 return orig;
2906 /* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
2907 use. Recursively does the same for subexpressions. */
2909 static void
2910 copy_rtx_if_shared_1 (rtx *orig1)
2912 rtx x;
2913 int i;
2914 enum rtx_code code;
2915 rtx *last_ptr;
2916 const char *format_ptr;
2917 int copied = 0;
2918 int length;
2920 /* Repeat is used to turn tail-recursion into iteration. */
2921 repeat:
2922 x = *orig1;
2924 if (x == 0)
2925 return;
2927 code = GET_CODE (x);
2929 /* These types may be freely shared. */
2931 switch (code)
2933 case REG:
2934 case DEBUG_EXPR:
2935 case VALUE:
2936 CASE_CONST_ANY:
2937 case SYMBOL_REF:
2938 case LABEL_REF:
2939 case CODE_LABEL:
2940 case PC:
2941 case CC0:
2942 case RETURN:
2943 case SIMPLE_RETURN:
2944 case SCRATCH:
2945 /* SCRATCHes must be shared because they represent distinct values. */
2946 return;
2947 case CLOBBER:
2948 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
2949 clobbers or clobbers of hard registers that originated as pseudos.
2950 This is needed to allow safe register renaming. */
2951 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2952 && ORIGINAL_REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 0)))
2953 return;
2954 break;
2956 case CONST:
2957 if (shared_const_p (x))
2958 return;
2959 break;
2961 case DEBUG_INSN:
2962 case INSN:
2963 case JUMP_INSN:
2964 case CALL_INSN:
2965 case NOTE:
2966 case BARRIER:
2967 /* The chain of insns is not being copied. */
2968 return;
2970 default:
2971 break;
2974 /* This rtx may not be shared. If it has already been seen,
2975 replace it with a copy of itself. */
2977 if (RTX_FLAG (x, used))
2979 x = shallow_copy_rtx (x);
2980 copied = 1;
2982 RTX_FLAG (x, used) = 1;
2984 /* Now scan the subexpressions recursively.
2985 We can store any replaced subexpressions directly into X
2986 since we know X is not shared! Any vectors in X
2987 must be copied if X was copied. */
2989 format_ptr = GET_RTX_FORMAT (code);
2990 length = GET_RTX_LENGTH (code);
2991 last_ptr = NULL;
2993 for (i = 0; i < length; i++)
2995 switch (*format_ptr++)
2997 case 'e':
2998 if (last_ptr)
2999 copy_rtx_if_shared_1 (last_ptr);
3000 last_ptr = &XEXP (x, i);
3001 break;
3003 case 'E':
3004 if (XVEC (x, i) != NULL)
3006 int j;
3007 int len = XVECLEN (x, i);
3009 /* Copy the vector iff I copied the rtx and the length
3010 is nonzero. */
3011 if (copied && len > 0)
3012 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
3014 /* Call recursively on all inside the vector. */
3015 for (j = 0; j < len; j++)
3017 if (last_ptr)
3018 copy_rtx_if_shared_1 (last_ptr);
3019 last_ptr = &XVECEXP (x, i, j);
3022 break;
3025 *orig1 = x;
3026 if (last_ptr)
3028 orig1 = last_ptr;
3029 goto repeat;
3031 return;
3034 /* Set the USED bit in X and its non-shareable subparts to FLAG. */
3036 static void
3037 mark_used_flags (rtx x, int flag)
3039 int i, j;
3040 enum rtx_code code;
3041 const char *format_ptr;
3042 int length;
3044 /* Repeat is used to turn tail-recursion into iteration. */
3045 repeat:
3046 if (x == 0)
3047 return;
3049 code = GET_CODE (x);
3051 /* These types may be freely shared so we needn't do any resetting
3052 for them. */
3054 switch (code)
3056 case REG:
3057 case DEBUG_EXPR:
3058 case VALUE:
3059 CASE_CONST_ANY:
3060 case SYMBOL_REF:
3061 case CODE_LABEL:
3062 case PC:
3063 case CC0:
3064 case RETURN:
3065 case SIMPLE_RETURN:
3066 return;
3068 case DEBUG_INSN:
3069 case INSN:
3070 case JUMP_INSN:
3071 case CALL_INSN:
3072 case NOTE:
3073 case LABEL_REF:
3074 case BARRIER:
3075 /* The chain of insns is not being copied. */
3076 return;
3078 default:
3079 break;
3082 RTX_FLAG (x, used) = flag;
3084 format_ptr = GET_RTX_FORMAT (code);
3085 length = GET_RTX_LENGTH (code);
3087 for (i = 0; i < length; i++)
3089 switch (*format_ptr++)
3091 case 'e':
3092 if (i == length-1)
3094 x = XEXP (x, i);
3095 goto repeat;
3097 mark_used_flags (XEXP (x, i), flag);
3098 break;
3100 case 'E':
3101 for (j = 0; j < XVECLEN (x, i); j++)
3102 mark_used_flags (XVECEXP (x, i, j), flag);
3103 break;
3108 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
3109 to look for shared sub-parts. */
3111 void
3112 reset_used_flags (rtx x)
3114 mark_used_flags (x, 0);
3117 /* Set all the USED bits in X to allow copy_rtx_if_shared to be used
3118 to look for shared sub-parts. */
3120 void
3121 set_used_flags (rtx x)
3123 mark_used_flags (x, 1);
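/* Usage sketch: to let copy_rtx_if_shared copy only the parts of PAT
   that are referenced more than once, clear the used bits first and then
   walk it (PAT is any rtx not part of the insn chain):

       reset_used_flags (pat);
       pat = copy_rtx_if_shared (pat);

   This is the same idiom unshare_all_rtl_again applies to whole insn
   chains above.  */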
3126 /* Copy X if necessary so that it won't be altered by changes in OTHER.
3127 Return X or the rtx for the pseudo reg the value of X was copied into.
3128 OTHER must be valid as a SET_DEST. */
3131 make_safe_from (rtx x, rtx other)
3133 while (1)
3134 switch (GET_CODE (other))
3136 case SUBREG:
3137 other = SUBREG_REG (other);
3138 break;
3139 case STRICT_LOW_PART:
3140 case SIGN_EXTEND:
3141 case ZERO_EXTEND:
3142 other = XEXP (other, 0);
3143 break;
3144 default:
3145 goto done;
3147 done:
3148 if ((MEM_P (other)
3149 && ! CONSTANT_P (x)
3150 && !REG_P (x)
3151 && GET_CODE (x) != SUBREG)
3152 || (REG_P (other)
3153 && (REGNO (other) < FIRST_PSEUDO_REGISTER
3154 || reg_mentioned_p (other, x))))
3156 rtx temp = gen_reg_rtx (GET_MODE (x));
3157 emit_move_insn (temp, x);
3158 return temp;
3160 return x;
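/* Usage sketch: an expander that may clobber TARGET before X is consumed
   can protect X first (X and TARGET are hypothetical rtxes, TARGET being
   valid as a SET_DEST):

       x = make_safe_from (x, target);

   X is copied into a fresh pseudo only when TARGET could actually affect
   it, as the tests above show.  */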
3163 /* Emission of insns (adding them to the doubly-linked list). */
3165 /* Return the last insn emitted, even if it is in a sequence now pushed. */
3167 rtx_insn *
3168 get_last_insn_anywhere (void)
3170 struct sequence_stack *stack;
3171 if (get_last_insn ())
3172 return get_last_insn ();
3173 for (stack = seq_stack; stack; stack = stack->next)
3174 if (stack->last != 0)
3175 return stack->last;
3176 return 0;
3179 /* Return the first nonnote insn emitted in current sequence or current
3180 function. This routine looks inside SEQUENCEs. */
3182 rtx_insn *
3183 get_first_nonnote_insn (void)
3185 rtx_insn *insn = get_insns ();
3187 if (insn)
3189 if (NOTE_P (insn))
3190 for (insn = next_insn (insn);
3191 insn && NOTE_P (insn);
3192 insn = next_insn (insn))
3193 continue;
3194 else
3196 if (NONJUMP_INSN_P (insn)
3197 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3198 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3202 return insn;
3205 /* Return the last nonnote insn emitted in current sequence or current
3206 function. This routine looks inside SEQUENCEs. */
3208 rtx_insn *
3209 get_last_nonnote_insn (void)
3211 rtx_insn *insn = get_last_insn ();
3213 if (insn)
3215 if (NOTE_P (insn))
3216 for (insn = previous_insn (insn);
3217 insn && NOTE_P (insn);
3218 insn = previous_insn (insn))
3219 continue;
3220 else
3222 if (NONJUMP_INSN_P (insn))
3223 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
3224 insn = seq->insn (seq->len () - 1);
3228 return insn;
3231 /* Return the number of actual (non-debug) insns emitted in this
3232 function. */
3235 get_max_insn_count (void)
3237 int n = cur_insn_uid;
3239 /* The table size must be stable across -g, to avoid codegen
3240 differences due to debug insns, and not be affected by
3241 -fmin-insn-uid, to avoid excessive table size and to simplify
3242 debugging of -fcompare-debug failures. */
3243 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3244 n -= cur_debug_insn_uid;
3245 else
3246 n -= MIN_NONDEBUG_INSN_UID;
3248 return n;
3252 /* Return the next insn. If it is a SEQUENCE, return the first insn
3253 of the sequence. */
3255 rtx_insn *
3256 next_insn (rtx_insn *insn)
3258 if (insn)
3260 insn = NEXT_INSN (insn);
3261 if (insn && NONJUMP_INSN_P (insn)
3262 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3263 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3266 return insn;
3269 /* Return the previous insn. If it is a SEQUENCE, return the last insn
3270 of the sequence. */
3272 rtx_insn *
3273 previous_insn (rtx_insn *insn)
3275 if (insn)
3277 insn = PREV_INSN (insn);
3278 if (insn && NONJUMP_INSN_P (insn))
3279 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
3280 insn = seq->insn (seq->len () - 1);
3283 return insn;
3286 /* Return the next insn after INSN that is not a NOTE. This routine does not
3287 look inside SEQUENCEs. */
3289 rtx_insn *
3290 next_nonnote_insn (rtx uncast_insn)
3292 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3293 while (insn)
3295 insn = NEXT_INSN (insn);
3296 if (insn == 0 || !NOTE_P (insn))
3297 break;
3300 return insn;
3303 /* Return the next insn after INSN that is not a NOTE, but stop the
3304 search before we enter another basic block. This routine does not
3305 look inside SEQUENCEs. */
3307 rtx_insn *
3308 next_nonnote_insn_bb (rtx_insn *insn)
3310 while (insn)
3312 insn = NEXT_INSN (insn);
3313 if (insn == 0 || !NOTE_P (insn))
3314 break;
3315 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3316 return NULL;
3319 return insn;
3322 /* Return the previous insn before INSN that is not a NOTE. This routine does
3323 not look inside SEQUENCEs. */
3325 rtx_insn *
3326 prev_nonnote_insn (rtx uncast_insn)
3328 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3330 while (insn)
3332 insn = PREV_INSN (insn);
3333 if (insn == 0 || !NOTE_P (insn))
3334 break;
3337 return insn;
3340 /* Return the previous insn before INSN that is not a NOTE, but stop
3341 the search before we enter another basic block. This routine does
3342 not look inside SEQUENCEs. */
3344 rtx_insn *
3345 prev_nonnote_insn_bb (rtx uncast_insn)
3347 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3349 while (insn)
3351 insn = PREV_INSN (insn);
3352 if (insn == 0 || !NOTE_P (insn))
3353 break;
3354 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3355 return NULL;
3358 return insn;
3361 /* Return the next insn after INSN that is not a DEBUG_INSN. This
3362 routine does not look inside SEQUENCEs. */
3364 rtx_insn *
3365 next_nondebug_insn (rtx uncast_insn)
3367 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3369 while (insn)
3371 insn = NEXT_INSN (insn);
3372 if (insn == 0 || !DEBUG_INSN_P (insn))
3373 break;
3376 return insn;
3379 /* Return the previous insn before INSN that is not a DEBUG_INSN.
3380 This routine does not look inside SEQUENCEs. */
3382 rtx_insn *
3383 prev_nondebug_insn (rtx uncast_insn)
3385 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3387 while (insn)
3389 insn = PREV_INSN (insn);
3390 if (insn == 0 || !DEBUG_INSN_P (insn))
3391 break;
3394 return insn;
3397 /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
3398 This routine does not look inside SEQUENCEs. */
3400 rtx_insn *
3401 next_nonnote_nondebug_insn (rtx uncast_insn)
3403 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3405 while (insn)
3407 insn = NEXT_INSN (insn);
3408 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3409 break;
3412 return insn;
3415 /* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
3416 This routine does not look inside SEQUENCEs. */
3418 rtx_insn *
3419 prev_nonnote_nondebug_insn (rtx uncast_insn)
3421 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3423 while (insn)
3425 insn = PREV_INSN (insn);
3426 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3427 break;
3430 return insn;
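/* Usage sketch: stepping to the neighbouring "real" instruction while
   ignoring both notes and debug insns (INSN is any insn in the chain):

       rtx_insn *next = next_nonnote_nondebug_insn (insn);

   NEXT is NULL when no such insn follows; the prev_* variants walk
   backwards in the same way.  */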
3433 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3434 or 0, if there is none. This routine does not look inside
3435 SEQUENCEs. */
3437 rtx_insn *
3438 next_real_insn (rtx uncast_insn)
3440 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3442 while (insn)
3444 insn = NEXT_INSN (insn);
3445 if (insn == 0 || INSN_P (insn))
3446 break;
3449 return insn;
3452 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3453 or 0, if there is none. This routine does not look inside
3454 SEQUENCEs. */
3456 rtx_insn *
3457 prev_real_insn (rtx uncast_insn)
3459 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3461 while (insn)
3463 insn = PREV_INSN (insn);
3464 if (insn == 0 || INSN_P (insn))
3465 break;
3468 return insn;
3471 /* Return the last CALL_INSN in the current list, or 0 if there is none.
3472 This routine does not look inside SEQUENCEs. */
3474 rtx_call_insn *
3475 last_call_insn (void)
3477 rtx_insn *insn;
3479 for (insn = get_last_insn ();
3480 insn && !CALL_P (insn);
3481 insn = PREV_INSN (insn))
3484 return safe_as_a <rtx_call_insn *> (insn);
3487 /* Find the next insn after INSN that really does something. This routine
3488 does not look inside SEQUENCEs. After reload this also skips over
3489 standalone USE and CLOBBER insns. */
3492 active_insn_p (const_rtx insn)
3494 return (CALL_P (insn) || JUMP_P (insn)
3495 || JUMP_TABLE_DATA_P (insn) /* FIXME */
3496 || (NONJUMP_INSN_P (insn)
3497 && (! reload_completed
3498 || (GET_CODE (PATTERN (insn)) != USE
3499 && GET_CODE (PATTERN (insn)) != CLOBBER))));
3502 rtx_insn *
3503 next_active_insn (rtx uncast_insn)
3505 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3507 while (insn)
3509 insn = NEXT_INSN (insn);
3510 if (insn == 0 || active_insn_p (insn))
3511 break;
3514 return insn;
3517 /* Find the last insn before INSN that really does something. This routine
3518 does not look inside SEQUENCEs. After reload this also skips over
3519 standalone USE and CLOBBER insns. */
3521 rtx_insn *
3522 prev_active_insn (rtx uncast_insn)
3524 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3526 while (insn)
3528 insn = PREV_INSN (insn);
3529 if (insn == 0 || active_insn_p (insn))
3530 break;
3533 return insn;
3536 #ifdef HAVE_cc0
3537 /* Return the next insn that uses CC0 after INSN, which is assumed to
3538 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3539 applied to the result of this function should yield INSN).
3541 Normally, this is simply the next insn. However, if a REG_CC_USER note
3542 is present, it contains the insn that uses CC0.
3544 Return 0 if we can't find the insn. */
3546 rtx_insn *
3547 next_cc0_user (rtx uncast_insn)
3549 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3551 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3553 if (note)
3554 return safe_as_a <rtx_insn *> (XEXP (note, 0));
3556 insn = next_nonnote_insn (insn);
3557 if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3558 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3560 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3561 return insn;
3563 return 0;
3566 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3567 note, it is the previous insn. */
3569 rtx_insn *
3570 prev_cc0_setter (rtx uncast_insn)
3572 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3574 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3576 if (note)
3577 return safe_as_a <rtx_insn *> (XEXP (note, 0));
3579 insn = prev_nonnote_insn (insn);
3580 gcc_assert (sets_cc0_p (PATTERN (insn)));
3582 return insn;
3584 #endif
3586 #ifdef AUTO_INC_DEC
3587 /* Return true if X contains an RTX_AUTOINC class rtx whose operand is REG. */
3589 static int
3590 find_auto_inc (const_rtx x, const_rtx reg)
3592 subrtx_iterator::array_type array;
3593 FOR_EACH_SUBRTX (iter, array, x, NONCONST)
3595 const_rtx x = *iter;
3596 if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC
3597 && rtx_equal_p (reg, XEXP (x, 0)))
3598 return true;
3600 return false;
3602 #endif
3604 /* Increment the label uses for all labels present in rtx. */
3606 static void
3607 mark_label_nuses (rtx x)
3609 enum rtx_code code;
3610 int i, j;
3611 const char *fmt;
3613 code = GET_CODE (x);
3614 if (code == LABEL_REF && LABEL_P (LABEL_REF_LABEL (x)))
3615 LABEL_NUSES (LABEL_REF_LABEL (x))++;
3617 fmt = GET_RTX_FORMAT (code);
3618 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3620 if (fmt[i] == 'e')
3621 mark_label_nuses (XEXP (x, i));
3622 else if (fmt[i] == 'E')
3623 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3624 mark_label_nuses (XVECEXP (x, i, j));
3629 /* Try splitting insns that can be split for better scheduling.
3630 PAT is the pattern which might be split.
3631 TRIAL is the insn providing PAT.
3632 LAST is nonzero if we should return the last insn of the sequence produced.
3634 If this routine succeeds in splitting, it returns the first or last
3635 replacement insn depending on the value of LAST. Otherwise, it
3636 returns TRIAL. If the insn to be returned can be split, it will be. */
3638 rtx_insn *
3639 try_split (rtx pat, rtx uncast_trial, int last)
3641 rtx_insn *trial = as_a <rtx_insn *> (uncast_trial);
3642 rtx_insn *before = PREV_INSN (trial);
3643 rtx_insn *after = NEXT_INSN (trial);
3644 rtx note;
3645 rtx_insn *seq, *tem;
3646 int probability;
3647 rtx_insn *insn_last, *insn;
3648 int njumps = 0;
3649 rtx call_insn = NULL_RTX;
3651 /* We're not good at redistributing frame information. */
3652 if (RTX_FRAME_RELATED_P (trial))
3653 return trial;
3655 if (any_condjump_p (trial)
3656 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3657 split_branch_probability = XINT (note, 0);
3658 probability = split_branch_probability;
3660 seq = safe_as_a <rtx_insn *> (split_insns (pat, trial));
3662 split_branch_probability = -1;
3664 if (!seq)
3665 return trial;
3667 /* Avoid infinite loop if any insn of the result matches
3668 the original pattern. */
3669 insn_last = seq;
3670 while (1)
3672 if (INSN_P (insn_last)
3673 && rtx_equal_p (PATTERN (insn_last), pat))
3674 return trial;
3675 if (!NEXT_INSN (insn_last))
3676 break;
3677 insn_last = NEXT_INSN (insn_last);
3680 /* We will be adding the new sequence to the function. The splitters
3681 may have introduced invalid RTL sharing, so unshare the sequence now. */
3682 unshare_all_rtl_in_chain (seq);
3684 /* Mark labels and copy flags. */
3685 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3687 if (JUMP_P (insn))
3689 if (JUMP_P (trial))
3690 CROSSING_JUMP_P (insn) = CROSSING_JUMP_P (trial);
3691 mark_jump_label (PATTERN (insn), insn, 0);
3692 njumps++;
3693 if (probability != -1
3694 && any_condjump_p (insn)
3695 && !find_reg_note (insn, REG_BR_PROB, 0))
3697 /* We can preserve the REG_BR_PROB notes only if exactly
3698 one jump is created, otherwise the machine description
3699 is responsible for this step using
3700 the split_branch_probability variable. */
3701 gcc_assert (njumps == 1);
3702 add_int_reg_note (insn, REG_BR_PROB, probability);
3707 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3708 in SEQ and copy any additional information across. */
3709 if (CALL_P (trial))
3711 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3712 if (CALL_P (insn))
3714 rtx_insn *next;
3715 rtx *p;
3717 gcc_assert (call_insn == NULL_RTX);
3718 call_insn = insn;
3720 /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the
3721 target may have explicitly specified. */
3722 p = &CALL_INSN_FUNCTION_USAGE (insn);
3723 while (*p)
3724 p = &XEXP (*p, 1);
3725 *p = CALL_INSN_FUNCTION_USAGE (trial);
3727 /* If the old call was a sibling call, the new one must
3728 be too. */
3729 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
3731 /* If the new call is the last instruction in the sequence,
3732 it will effectively replace the old call in-situ. Otherwise
3733 we must move any following NOTE_INSN_CALL_ARG_LOCATION note
3734 so that it comes immediately after the new call. */
3735 if (NEXT_INSN (insn))
3736 for (next = NEXT_INSN (trial);
3737 next && NOTE_P (next);
3738 next = NEXT_INSN (next))
3739 if (NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION)
3741 remove_insn (next);
3742 add_insn_after (next, insn, NULL);
3743 break;
3748 /* Copy notes, particularly those related to the CFG. */
3749 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3751 switch (REG_NOTE_KIND (note))
3753 case REG_EH_REGION:
3754 copy_reg_eh_region_note_backward (note, insn_last, NULL);
3755 break;
3757 case REG_NORETURN:
3758 case REG_SETJMP:
3759 case REG_TM:
3760 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3762 if (CALL_P (insn))
3763 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3765 break;
3767 case REG_NON_LOCAL_GOTO:
3768 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3770 if (JUMP_P (insn))
3771 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3773 break;
3775 #ifdef AUTO_INC_DEC
3776 case REG_INC:
3777 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3779 rtx reg = XEXP (note, 0);
3780 if (!FIND_REG_INC_NOTE (insn, reg)
3781 && find_auto_inc (PATTERN (insn), reg))
3782 add_reg_note (insn, REG_INC, reg);
3784 break;
3785 #endif
3787 case REG_ARGS_SIZE:
3788 fixup_args_size_notes (NULL, insn_last, INTVAL (XEXP (note, 0)));
3789 break;
3791 case REG_CALL_DECL:
3792 gcc_assert (call_insn != NULL_RTX);
3793 add_reg_note (call_insn, REG_NOTE_KIND (note), XEXP (note, 0));
3794 break;
3796 default:
3797 break;
3801 /* If there are LABELS inside the split insns increment the
3802 usage count so we don't delete the label. */
3803 if (INSN_P (trial))
3805 insn = insn_last;
3806 while (insn != NULL_RTX)
3808 /* JUMP_P insns have already been "marked" above. */
3809 if (NONJUMP_INSN_P (insn))
3810 mark_label_nuses (PATTERN (insn));
3812 insn = PREV_INSN (insn);
3816 tem = emit_insn_after_setloc (seq, trial, INSN_LOCATION (trial));
3818 delete_insn (trial);
3820 /* Recursively call try_split for each new insn created; by the
3821 time control returns here that insn will be fully split, so
3822 set LAST and continue from the insn after the one returned.
3823 We can't use next_active_insn here since AFTER may be a note.
3824 Ignore deleted insns, which can occur if not optimizing. */
3825 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3826 if (! tem->deleted () && INSN_P (tem))
3827 tem = try_split (PATTERN (tem), tem, 1);
3829 /* Return either the first or the last insn, depending on which was
3830 requested. */
3831 return last
3832 ? (after ? PREV_INSN (after) : get_last_insn ())
3833 : NEXT_INSN (before);
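/* Usage sketch: passes normally reach this through split_all_insns, but
   a direct call on a freshly emitted insn looks like (INSN is a
   hypothetical insn already in the chain):

       rtx_insn *last = try_split (PATTERN (insn), insn, 1);

   If no splitter matched, LAST is simply INSN; otherwise INSN has been
   deleted and LAST is the final insn of the replacement sequence.  */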
3836 /* Make and return an INSN rtx, initializing all its slots.
3837 Store PATTERN in the pattern slots. */
3839 rtx_insn *
3840 make_insn_raw (rtx pattern)
3842 rtx_insn *insn;
3844 insn = as_a <rtx_insn *> (rtx_alloc (INSN));
3846 INSN_UID (insn) = cur_insn_uid++;
3847 PATTERN (insn) = pattern;
3848 INSN_CODE (insn) = -1;
3849 REG_NOTES (insn) = NULL;
3850 INSN_LOCATION (insn) = curr_insn_location ();
3851 BLOCK_FOR_INSN (insn) = NULL;
3853 #ifdef ENABLE_RTL_CHECKING
3854 if (insn
3855 && INSN_P (insn)
3856 && (returnjump_p (insn)
3857 || (GET_CODE (insn) == SET
3858 && SET_DEST (insn) == pc_rtx)))
3860 warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
3861 debug_rtx (insn);
3863 #endif
3865 return insn;
3868 /* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn. */
3870 static rtx_insn *
3871 make_debug_insn_raw (rtx pattern)
3873 rtx_debug_insn *insn;
3875 insn = as_a <rtx_debug_insn *> (rtx_alloc (DEBUG_INSN));
3876 INSN_UID (insn) = cur_debug_insn_uid++;
3877 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3878 INSN_UID (insn) = cur_insn_uid++;
3880 PATTERN (insn) = pattern;
3881 INSN_CODE (insn) = -1;
3882 REG_NOTES (insn) = NULL;
3883 INSN_LOCATION (insn) = curr_insn_location ();
3884 BLOCK_FOR_INSN (insn) = NULL;
3886 return insn;
3889 /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
3891 static rtx_insn *
3892 make_jump_insn_raw (rtx pattern)
3894 rtx_jump_insn *insn;
3896 insn = as_a <rtx_jump_insn *> (rtx_alloc (JUMP_INSN));
3897 INSN_UID (insn) = cur_insn_uid++;
3899 PATTERN (insn) = pattern;
3900 INSN_CODE (insn) = -1;
3901 REG_NOTES (insn) = NULL;
3902 JUMP_LABEL (insn) = NULL;
3903 INSN_LOCATION (insn) = curr_insn_location ();
3904 BLOCK_FOR_INSN (insn) = NULL;
3906 return insn;
3909 /* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
3911 static rtx_insn *
3912 make_call_insn_raw (rtx pattern)
3914 rtx_call_insn *insn;
3916 insn = as_a <rtx_call_insn *> (rtx_alloc (CALL_INSN));
3917 INSN_UID (insn) = cur_insn_uid++;
3919 PATTERN (insn) = pattern;
3920 INSN_CODE (insn) = -1;
3921 REG_NOTES (insn) = NULL;
3922 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
3923 INSN_LOCATION (insn) = curr_insn_location ();
3924 BLOCK_FOR_INSN (insn) = NULL;
3926 return insn;
3929 /* Like `make_insn_raw' but make a NOTE instead of an insn. */
3931 static rtx_note *
3932 make_note_raw (enum insn_note subtype)
3934 /* Some notes are never created this way at all. These notes are
3935 only created by patching out insns. */
3936 gcc_assert (subtype != NOTE_INSN_DELETED_LABEL
3937 && subtype != NOTE_INSN_DELETED_DEBUG_LABEL);
3939 rtx_note *note = as_a <rtx_note *> (rtx_alloc (NOTE));
3940 INSN_UID (note) = cur_insn_uid++;
3941 NOTE_KIND (note) = subtype;
3942 BLOCK_FOR_INSN (note) = NULL;
3943 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
3944 return note;
3947 /* Add INSN to the end of the doubly-linked list, between PREV and NEXT.
3948 INSN may be any object that can appear in the chain: INSN_P and NOTE_P objects,
3949 but also BARRIERs and JUMP_TABLE_DATAs. PREV and NEXT may be NULL. */
3951 static inline void
3952 link_insn_into_chain (rtx_insn *insn, rtx_insn *prev, rtx_insn *next)
3954 SET_PREV_INSN (insn) = prev;
3955 SET_NEXT_INSN (insn) = next;
3956 if (prev != NULL)
3958 SET_NEXT_INSN (prev) = insn;
3959 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3961 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
3962 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = insn;
3965 if (next != NULL)
3967 SET_PREV_INSN (next) = insn;
3968 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3970 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
3971 SET_PREV_INSN (sequence->insn (0)) = insn;
3975 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3977 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (insn));
3978 SET_PREV_INSN (sequence->insn (0)) = prev;
3979 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
3983 /* Add INSN to the end of the doubly-linked list.
3984 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
3986 void
3987 add_insn (rtx_insn *insn)
3989 rtx_insn *prev = get_last_insn ();
3990 link_insn_into_chain (insn, prev, NULL);
3991 if (NULL == get_insns ())
3992 set_first_insn (insn);
3993 set_last_insn (insn);
3996 /* Add INSN into the doubly-linked list after insn AFTER. */
3998 static void
3999 add_insn_after_nobb (rtx_insn *insn, rtx_insn *after)
4001 rtx_insn *next = NEXT_INSN (after);
4003 gcc_assert (!optimize || !after->deleted ());
4005 link_insn_into_chain (insn, after, next);
4007 if (next == NULL)
4009 if (get_last_insn () == after)
4010 set_last_insn (insn);
4011 else
4013 struct sequence_stack *stack = seq_stack;
4014 /* Scan all pending sequences too. */
4015 for (; stack; stack = stack->next)
4016 if (after == stack->last)
4018 stack->last = insn;
4019 break;
4025 /* Add INSN into the doubly-linked list before insn BEFORE. */
4027 static void
4028 add_insn_before_nobb (rtx_insn *insn, rtx_insn *before)
4030 rtx_insn *prev = PREV_INSN (before);
4032 gcc_assert (!optimize || !before->deleted ());
4034 link_insn_into_chain (insn, prev, before);
4036 if (prev == NULL)
4038 if (get_insns () == before)
4039 set_first_insn (insn);
4040 else
4042 struct sequence_stack *stack = seq_stack;
4043 /* Scan all pending sequences too. */
4044 for (; stack; stack = stack->next)
4045 if (before == stack->first)
4047 stack->first = insn;
4048 break;
4051 gcc_assert (stack);
4056 /* Like add_insn_after_nobb, but try to set BLOCK_FOR_INSN.
4057 If BB is NULL, an attempt is made to infer the bb from AFTER.
4059 This and the next function should be the only functions called
4060 to insert an insn once delay slots have been filled since only
4061 they know how to update a SEQUENCE. */
4063 void
4064 add_insn_after (rtx uncast_insn, rtx uncast_after, basic_block bb)
4066 rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
4067 rtx_insn *after = as_a <rtx_insn *> (uncast_after);
4068 add_insn_after_nobb (insn, after);
4069 if (!BARRIER_P (after)
4070 && !BARRIER_P (insn)
4071 && (bb = BLOCK_FOR_INSN (after)))
4073 set_block_for_insn (insn, bb);
4074 if (INSN_P (insn))
4075 df_insn_rescan (insn);
4076 /* Should not happen as first in the BB is always
4077 either NOTE or LABEL. */
4078 if (BB_END (bb) == after
4079 /* Avoid clobbering of structure when creating new BB. */
4080 && !BARRIER_P (insn)
4081 && !NOTE_INSN_BASIC_BLOCK_P (insn))
4082 BB_END (bb) = insn;
4086 /* Like add_insn_before_nobb, but try to set BLOCK_FOR_INSN.
4087 If BB is NULL, an attempt is made to infer the bb from before.
4089 This and the previous function should be the only functions called
4090 to insert an insn once delay slots have been filled since only
4091 they know how to update a SEQUENCE. */
4093 void
4094 add_insn_before (rtx uncast_insn, rtx uncast_before, basic_block bb)
4096 rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
4097 rtx_insn *before = as_a <rtx_insn *> (uncast_before);
4098 add_insn_before_nobb (insn, before);
4100 if (!bb
4101 && !BARRIER_P (before)
4102 && !BARRIER_P (insn))
4103 bb = BLOCK_FOR_INSN (before);
4105 if (bb)
4107 set_block_for_insn (insn, bb);
4108 if (INSN_P (insn))
4109 df_insn_rescan (insn);
4110 /* Should not happen as first in the BB is always either NOTE or
4111 LABEL. */
4112 gcc_assert (BB_HEAD (bb) != insn
4113 /* Avoid clobbering of structure when creating new BB. */
4114 || BARRIER_P (insn)
4115 || NOTE_INSN_BASIC_BLOCK_P (insn));
4119 /* Replace INSN with a deleted instruction note. */
4121 void
4122 set_insn_deleted (rtx insn)
4124 if (INSN_P (insn))
4125 df_insn_delete (as_a <rtx_insn *> (insn));
4126 PUT_CODE (insn, NOTE);
4127 NOTE_KIND (insn) = NOTE_INSN_DELETED;
4131 /* Unlink INSN from the insn chain.
4133 This function knows how to handle sequences.
4135 This function does not invalidate data flow information associated with
4136 INSN (i.e. does not call df_insn_delete). That makes this function
4137 usable for only disconnecting an insn from the chain, and re-emit it
4138 elsewhere later.
4140 To later insert INSN elsewhere in the insn chain via add_insn and
4141 similar functions, PREV_INSN and NEXT_INSN must be nullified by
4142 the caller. Nullifying them here breaks many insn chain walks.
4144 To really delete an insn and related DF information, use delete_insn. */
4146 void
4147 remove_insn (rtx uncast_insn)
4149 rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
4150 rtx_insn *next = NEXT_INSN (insn);
4151 rtx_insn *prev = PREV_INSN (insn);
4152 basic_block bb;
4154 if (prev)
4156 SET_NEXT_INSN (prev) = next;
4157 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
4159 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
4160 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
4163 else if (get_insns () == insn)
4165 if (next)
4166 SET_PREV_INSN (next) = NULL;
4167 set_first_insn (next);
4169 else
4171 struct sequence_stack *stack = seq_stack;
4172 /* Scan all pending sequences too. */
4173 for (; stack; stack = stack->next)
4174 if (insn == stack->first)
4176 stack->first = next;
4177 break;
4180 gcc_assert (stack);
4183 if (next)
4185 SET_PREV_INSN (next) = prev;
4186 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
4188 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
4189 SET_PREV_INSN (sequence->insn (0)) = prev;
4192 else if (get_last_insn () == insn)
4193 set_last_insn (prev);
4194 else
4196 struct sequence_stack *stack = seq_stack;
4197 /* Scan all pending sequences too. */
4198 for (; stack; stack = stack->next)
4199 if (insn == stack->last)
4201 stack->last = prev;
4202 break;
4205 gcc_assert (stack);
4208 /* Fix up basic block boundaries, if necessary. */
4209 if (!BARRIER_P (insn)
4210 && (bb = BLOCK_FOR_INSN (insn)))
4212 if (BB_HEAD (bb) == insn)
4214 /* Never ever delete the basic block note without deleting the whole
4215 basic block. */
4216 gcc_assert (!NOTE_P (insn));
4217 BB_HEAD (bb) = next;
4219 if (BB_END (bb) == insn)
4220 BB_END (bb) = prev;
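/* Illustrative sketch (editorial example, not part of the original file):
   move INSN so that it follows AFTER, using remove_insn as documented
   above.  The PREV/NEXT links must be nullified by hand before the insn
   is re-inserted into the chain.  */
static void
example_move_insn_after (rtx_insn *insn, rtx_insn *after)
{
  remove_insn (insn);			/* Unlink; DF info is kept.  */
  SET_PREV_INSN (insn) = NULL;		/* Nullify the chain links...  */
  SET_NEXT_INSN (insn) = NULL;		/* ...before re-inserting.  */
  add_insn_after (insn, after, NULL);	/* BB is inferred from AFTER.  */
}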
4224 /* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
4226 void
4227 add_function_usage_to (rtx call_insn, rtx call_fusage)
4229 gcc_assert (call_insn && CALL_P (call_insn));
4231 /* Put the register usage information on the CALL. If there is already
4232 some usage information, put ours at the end. */
4233 if (CALL_INSN_FUNCTION_USAGE (call_insn))
4235 rtx link;
4237 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
4238 link = XEXP (link, 1))
4241 XEXP (link, 1) = call_fusage;
4243 else
4244 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
4247 /* Delete all insns made since FROM.
4248 FROM becomes the new last instruction. */
4250 void
4251 delete_insns_since (rtx_insn *from)
4253 if (from == 0)
4254 set_first_insn (0);
4255 else
4256 SET_NEXT_INSN (from) = 0;
4257 set_last_insn (from);
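/* Illustrative sketch (editorial example, not part of the original file):
   the usual "tentative expansion" idiom built on delete_insns_since.
   TRY_EXPAND is a hypothetical callback standing in for any emit attempt
   that may fail half-way through.  */
static bool
example_try_expand (bool (*try_expand) (void))
{
  rtx_insn *last = get_last_insn ();

  if (try_expand ())
    return true;

  /* The attempt emitted some insns but did not succeed; discard them.  */
  delete_insns_since (last);
  return false;
}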
4260 /* This function is deprecated, please use sequences instead.
4262 Move a consecutive bunch of insns to a different place in the chain.
4263 The insns to be moved are those between FROM and TO.
4264 They are moved to a new position after the insn AFTER.
4265 AFTER must not be FROM or TO or any insn in between.
4267 This function does not know about SEQUENCEs and hence should not be
4268 called after delay-slot filling has been done. */
4270 void
4271 reorder_insns_nobb (rtx_insn *from, rtx_insn *to, rtx_insn *after)
4273 #ifdef ENABLE_CHECKING
4274 rtx_insn *x;
4275 for (x = from; x != to; x = NEXT_INSN (x))
4276 gcc_assert (after != x);
4277 gcc_assert (after != to);
4278 #endif
4280 /* Splice this bunch out of where it is now. */
4281 if (PREV_INSN (from))
4282 SET_NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
4283 if (NEXT_INSN (to))
4284 SET_PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
4285 if (get_last_insn () == to)
4286 set_last_insn (PREV_INSN (from));
4287 if (get_insns () == from)
4288 set_first_insn (NEXT_INSN (to));
4290 /* Make the new neighbors point to it and it to them. */
4291 if (NEXT_INSN (after))
4292 SET_PREV_INSN (NEXT_INSN (after)) = to;
4294 SET_NEXT_INSN (to) = NEXT_INSN (after);
4295 SET_PREV_INSN (from) = after;
4296 SET_NEXT_INSN (after) = from;
4297 if (after == get_last_insn ())
4298 set_last_insn (to);
4301 /* Same as function above, but take care to update BB boundaries. */
4302 void
4303 reorder_insns (rtx_insn *from, rtx_insn *to, rtx_insn *after)
4305 rtx_insn *prev = PREV_INSN (from);
4306 basic_block bb, bb2;
4308 reorder_insns_nobb (from, to, after);
4310 if (!BARRIER_P (after)
4311 && (bb = BLOCK_FOR_INSN (after)))
4313 rtx_insn *x;
4314 df_set_bb_dirty (bb);
4316 if (!BARRIER_P (from)
4317 && (bb2 = BLOCK_FOR_INSN (from)))
4319 if (BB_END (bb2) == to)
4320 BB_END (bb2) = prev;
4321 df_set_bb_dirty (bb2);
4324 if (BB_END (bb) == after)
4325 BB_END (bb) = to;
4327 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
4328 if (!BARRIER_P (x))
4329 df_insn_change_bb (x, bb);
4334 /* Emit insn(s) of given code and pattern
4335 at a specified place within the doubly-linked list.
4337 All of the emit_foo global entry points accept an object
4338 X which is either an insn list or a PATTERN of a single
4339 instruction.
4341 There are thus a few canonical ways to generate code and
4342 emit it at a specific place in the instruction stream. For
4343 example, consider the instruction named SPOT and the fact that
4344 we would like to emit some instructions before SPOT. We might
4345 do it like this:
4347 start_sequence ();
4348 ... emit the new instructions ...
4349 insns_head = get_insns ();
4350 end_sequence ();
4352 emit_insn_before (insns_head, SPOT);
4354 It used to be common to generate SEQUENCE rtl instead, but that
4355 is a relic of the past which no longer occurs. The reason is that
4356    SEQUENCE rtl results in badly fragmented RTL memory since the SEQUENCE
4357 generated would almost certainly die right after it was created. */
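/* Illustrative sketch (editorial example, not part of the original file)
   of the recipe above: build new insns inside a sequence and emit them
   just before SPOT.  The register copy used here is only a placeholder
   for "... emit the new instructions ...".  */
static void
example_emit_before_spot (rtx dest, rtx src, rtx_insn *spot)
{
  rtx_insn *insns_head;

  start_sequence ();
  emit_move_insn (dest, src);
  insns_head = get_insns ();
  end_sequence ();

  emit_insn_before (insns_head, spot);
}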
4359 static rtx_insn *
4360 emit_pattern_before_noloc (rtx x, rtx before, rtx last, basic_block bb,
4361 rtx_insn *(*make_raw) (rtx))
4363 rtx_insn *insn;
4365 gcc_assert (before);
4367 if (x == NULL_RTX)
4368 return safe_as_a <rtx_insn *> (last);
4370 switch (GET_CODE (x))
4372 case DEBUG_INSN:
4373 case INSN:
4374 case JUMP_INSN:
4375 case CALL_INSN:
4376 case CODE_LABEL:
4377 case BARRIER:
4378 case NOTE:
4379 insn = as_a <rtx_insn *> (x);
4380 while (insn)
4382 rtx_insn *next = NEXT_INSN (insn);
4383 add_insn_before (insn, before, bb);
4384 last = insn;
4385 insn = next;
4387 break;
4389 #ifdef ENABLE_RTL_CHECKING
4390 case SEQUENCE:
4391 gcc_unreachable ();
4392 break;
4393 #endif
4395 default:
4396 last = (*make_raw) (x);
4397 add_insn_before (last, before, bb);
4398 break;
4401 return safe_as_a <rtx_insn *> (last);
4404 /* Make X be output before the instruction BEFORE. */
4406 rtx_insn *
4407 emit_insn_before_noloc (rtx x, rtx_insn *before, basic_block bb)
4409 return emit_pattern_before_noloc (x, before, before, bb, make_insn_raw);
4412 /* Make an instruction with body X and code JUMP_INSN
4413 and output it before the instruction BEFORE. */
4415 rtx_insn *
4416 emit_jump_insn_before_noloc (rtx x, rtx_insn *before)
4418 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4419 make_jump_insn_raw);
4422 /* Make an instruction with body X and code CALL_INSN
4423 and output it before the instruction BEFORE. */
4425 rtx_insn *
4426 emit_call_insn_before_noloc (rtx x, rtx_insn *before)
4428 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4429 make_call_insn_raw);
4432 /* Make an instruction with body X and code DEBUG_INSN
4433 and output it before the instruction BEFORE. */
4435 rtx_insn *
4436 emit_debug_insn_before_noloc (rtx x, rtx before)
4438 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4439 make_debug_insn_raw);
4442 /* Make an insn of code BARRIER
4443 and output it before the insn BEFORE. */
4445 rtx_barrier *
4446 emit_barrier_before (rtx before)
4448 rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
4450 INSN_UID (insn) = cur_insn_uid++;
4452 add_insn_before (insn, before, NULL);
4453 return insn;
4456 /* Emit the label LABEL before the insn BEFORE. */
4458 rtx_insn *
4459 emit_label_before (rtx label, rtx_insn *before)
4461 gcc_checking_assert (INSN_UID (label) == 0);
4462 INSN_UID (label) = cur_insn_uid++;
4463 add_insn_before (label, before, NULL);
4464 return as_a <rtx_insn *> (label);
4467 /* Helper for emit_insn_after, handles lists of instructions
4468 efficiently. */
4470 static rtx_insn *
4471 emit_insn_after_1 (rtx_insn *first, rtx uncast_after, basic_block bb)
4473 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4474 rtx_insn *last;
4475 rtx_insn *after_after;
4476 if (!bb && !BARRIER_P (after))
4477 bb = BLOCK_FOR_INSN (after);
4479 if (bb)
4481 df_set_bb_dirty (bb);
4482 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4483 if (!BARRIER_P (last))
4485 set_block_for_insn (last, bb);
4486 df_insn_rescan (last);
4488 if (!BARRIER_P (last))
4490 set_block_for_insn (last, bb);
4491 df_insn_rescan (last);
4493 if (BB_END (bb) == after)
4494 BB_END (bb) = last;
4496 else
4497 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4498 continue;
4500 after_after = NEXT_INSN (after);
4502 SET_NEXT_INSN (after) = first;
4503 SET_PREV_INSN (first) = after;
4504 SET_NEXT_INSN (last) = after_after;
4505 if (after_after)
4506 SET_PREV_INSN (after_after) = last;
4508 if (after == get_last_insn ())
4509 set_last_insn (last);
4511 return last;
4514 static rtx_insn *
4515 emit_pattern_after_noloc (rtx x, rtx uncast_after, basic_block bb,
4516 rtx_insn *(*make_raw)(rtx))
4518 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4519 rtx_insn *last = after;
4521 gcc_assert (after);
4523 if (x == NULL_RTX)
4524 return last;
4526 switch (GET_CODE (x))
4528 case DEBUG_INSN:
4529 case INSN:
4530 case JUMP_INSN:
4531 case CALL_INSN:
4532 case CODE_LABEL:
4533 case BARRIER:
4534 case NOTE:
4535 last = emit_insn_after_1 (as_a <rtx_insn *> (x), after, bb);
4536 break;
4538 #ifdef ENABLE_RTL_CHECKING
4539 case SEQUENCE:
4540 gcc_unreachable ();
4541 break;
4542 #endif
4544 default:
4545 last = (*make_raw) (x);
4546 add_insn_after (last, after, bb);
4547 break;
4550 return last;
4553 /* Make X be output after the insn AFTER and set the BB of insn. If
4554 BB is NULL, an attempt is made to infer the BB from AFTER. */
4556 rtx_insn *
4557 emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
4559 return emit_pattern_after_noloc (x, after, bb, make_insn_raw);
4563 /* Make an insn of code JUMP_INSN with body X
4564 and output it after the insn AFTER. */
4566 rtx_insn *
4567 emit_jump_insn_after_noloc (rtx x, rtx after)
4569 return emit_pattern_after_noloc (x, after, NULL, make_jump_insn_raw);
4572 /* Make an instruction with body X and code CALL_INSN
4573 and output it after the instruction AFTER. */
4575 rtx_insn *
4576 emit_call_insn_after_noloc (rtx x, rtx after)
4578 return emit_pattern_after_noloc (x, after, NULL, make_call_insn_raw);
4581 /* Make an instruction with body X and code DEBUG_INSN
4582 and output it after the instruction AFTER. */
4584 rtx_insn *
4585 emit_debug_insn_after_noloc (rtx x, rtx after)
4587 return emit_pattern_after_noloc (x, after, NULL, make_debug_insn_raw);
4590 /* Make an insn of code BARRIER
4591 and output it after the insn AFTER. */
4593 rtx_barrier *
4594 emit_barrier_after (rtx after)
4596 rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
4598 INSN_UID (insn) = cur_insn_uid++;
4600 add_insn_after (insn, after, NULL);
4601 return insn;
4604 /* Emit the label LABEL after the insn AFTER. */
4606 rtx_insn *
4607 emit_label_after (rtx label, rtx_insn *after)
4609 gcc_checking_assert (INSN_UID (label) == 0);
4610 INSN_UID (label) = cur_insn_uid++;
4611 add_insn_after (label, after, NULL);
4612 return as_a <rtx_insn *> (label);
4615 /* Notes require a bit of special handling: Some notes need to have their
4616 BLOCK_FOR_INSN set, others should never have it set, and some should
4617 have it set or clear depending on the context. */
4619 /* Return true iff a note of kind SUBTYPE should be emitted with routines
4620    that never set BLOCK_FOR_INSN on NOTE. ON_BB_BOUNDARY_P is true if the
4621 caller is asked to emit a note before BB_HEAD, or after BB_END. */
4623 static bool
4624 note_outside_basic_block_p (enum insn_note subtype, bool on_bb_boundary_p)
4626 switch (subtype)
4628 /* NOTE_INSN_SWITCH_TEXT_SECTIONS only appears between basic blocks. */
4629 case NOTE_INSN_SWITCH_TEXT_SECTIONS:
4630 return true;
4632 /* Notes for var tracking and EH region markers can appear between or
4633 inside basic blocks. If the caller is emitting on the basic block
4634 boundary, do not set BLOCK_FOR_INSN on the new note. */
4635 case NOTE_INSN_VAR_LOCATION:
4636 case NOTE_INSN_CALL_ARG_LOCATION:
4637 case NOTE_INSN_EH_REGION_BEG:
4638 case NOTE_INSN_EH_REGION_END:
4639 return on_bb_boundary_p;
4641 /* Otherwise, BLOCK_FOR_INSN must be set. */
4642 default:
4643 return false;
4647 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
4649 rtx_note *
4650 emit_note_after (enum insn_note subtype, rtx uncast_after)
4652 rtx_insn *after = as_a <rtx_insn *> (uncast_after);
4653 rtx_note *note = make_note_raw (subtype);
4654 basic_block bb = BARRIER_P (after) ? NULL : BLOCK_FOR_INSN (after);
4655 bool on_bb_boundary_p = (bb != NULL && BB_END (bb) == after);
4657 if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4658 add_insn_after_nobb (note, after);
4659 else
4660 add_insn_after (note, after, bb);
4661 return note;
4664 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4666 rtx_note *
4667 emit_note_before (enum insn_note subtype, rtx uncast_before)
4669 rtx_insn *before = as_a <rtx_insn *> (uncast_before);
4670 rtx_note *note = make_note_raw (subtype);
4671 basic_block bb = BARRIER_P (before) ? NULL : BLOCK_FOR_INSN (before);
4672 bool on_bb_boundary_p = (bb != NULL && BB_HEAD (bb) == before);
4674 if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4675 add_insn_before_nobb (note, before);
4676 else
4677 add_insn_before (note, before, bb);
4678 return note;
4681 /* Insert PATTERN after AFTER, setting its INSN_LOCATION to LOC.
4682 MAKE_RAW indicates how to turn PATTERN into a real insn. */
4684 static rtx_insn *
4685 emit_pattern_after_setloc (rtx pattern, rtx uncast_after, int loc,
4686 rtx_insn *(*make_raw) (rtx))
4688 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4689 rtx last = emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4691 if (pattern == NULL_RTX || !loc)
4692 return safe_as_a <rtx_insn *> (last);
4694 after = NEXT_INSN (after);
4695 while (1)
4697 if (active_insn_p (after) && !INSN_LOCATION (after))
4698 INSN_LOCATION (after) = loc;
4699 if (after == last)
4700 break;
4701 after = NEXT_INSN (after);
4703 return safe_as_a <rtx_insn *> (last);
4706 /* Insert PATTERN after AFTER. MAKE_RAW indicates how to turn PATTERN
4707 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert after
4708 any DEBUG_INSNs. */
4710 static rtx_insn *
4711 emit_pattern_after (rtx pattern, rtx uncast_after, bool skip_debug_insns,
4712 rtx_insn *(*make_raw) (rtx))
4714 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4715 rtx_insn *prev = after;
4717 if (skip_debug_insns)
4718 while (DEBUG_INSN_P (prev))
4719 prev = PREV_INSN (prev);
4721 if (INSN_P (prev))
4722 return emit_pattern_after_setloc (pattern, after, INSN_LOCATION (prev),
4723 make_raw);
4724 else
4725 return emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4728 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4729 rtx_insn *
4730 emit_insn_after_setloc (rtx pattern, rtx after, int loc)
4732 return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw);
4735 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4736 rtx_insn *
4737 emit_insn_after (rtx pattern, rtx after)
4739 return emit_pattern_after (pattern, after, true, make_insn_raw);
4742 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4743 rtx_insn *
4744 emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
4746 return emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw);
4749 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4750 rtx_insn *
4751 emit_jump_insn_after (rtx pattern, rtx after)
4753 return emit_pattern_after (pattern, after, true, make_jump_insn_raw);
4756 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4757 rtx_insn *
4758 emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
4760 return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw);
4763 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4764 rtx_insn *
4765 emit_call_insn_after (rtx pattern, rtx after)
4767 return emit_pattern_after (pattern, after, true, make_call_insn_raw);
4770 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4771 rtx_insn *
4772 emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc)
4774 return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw);
4777 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4778 rtx_insn *
4779 emit_debug_insn_after (rtx pattern, rtx after)
4781 return emit_pattern_after (pattern, after, false, make_debug_insn_raw);
4784 /* Insert PATTERN before BEFORE, setting its INSN_LOCATION to LOC.
4785 MAKE_RAW indicates how to turn PATTERN into a real insn. INSNP
4786 indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN,
4787 CALL_INSN, etc. */
4789 static rtx_insn *
4790 emit_pattern_before_setloc (rtx pattern, rtx uncast_before, int loc, bool insnp,
4791 rtx_insn *(*make_raw) (rtx))
4793 rtx_insn *before = as_a <rtx_insn *> (uncast_before);
4794 rtx_insn *first = PREV_INSN (before);
4795 rtx_insn *last = emit_pattern_before_noloc (pattern, before,
4796 insnp ? before : NULL_RTX,
4797 NULL, make_raw);
4799 if (pattern == NULL_RTX || !loc)
4800 return last;
4802 if (!first)
4803 first = get_insns ();
4804 else
4805 first = NEXT_INSN (first);
4806 while (1)
4808 if (active_insn_p (first) && !INSN_LOCATION (first))
4809 INSN_LOCATION (first) = loc;
4810 if (first == last)
4811 break;
4812 first = NEXT_INSN (first);
4814 return last;
4817 /* Insert PATTERN before BEFORE. MAKE_RAW indicates how to turn PATTERN
4818 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert
4819 before any DEBUG_INSNs. INSNP indicates if PATTERN is meant for an
4820 INSN as opposed to a JUMP_INSN, CALL_INSN, etc. */
4822 static rtx_insn *
4823 emit_pattern_before (rtx pattern, rtx uncast_before, bool skip_debug_insns,
4824 bool insnp, rtx_insn *(*make_raw) (rtx))
4826 rtx_insn *before = safe_as_a <rtx_insn *> (uncast_before);
4827 rtx_insn *next = before;
4829 if (skip_debug_insns)
4830 while (DEBUG_INSN_P (next))
4831 next = PREV_INSN (next);
4833 if (INSN_P (next))
4834 return emit_pattern_before_setloc (pattern, before, INSN_LOCATION (next),
4835 insnp, make_raw);
4836 else
4837 return emit_pattern_before_noloc (pattern, before,
4838 insnp ? before : NULL_RTX,
4839 NULL, make_raw);
4842 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4843 rtx_insn *
4844 emit_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
4846 return emit_pattern_before_setloc (pattern, before, loc, true,
4847 make_insn_raw);
4850 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
4851 rtx_insn *
4852 emit_insn_before (rtx pattern, rtx before)
4854 return emit_pattern_before (pattern, before, true, true, make_insn_raw);
4857 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4858 rtx_insn *
4859 emit_jump_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
4861 return emit_pattern_before_setloc (pattern, before, loc, false,
4862 make_jump_insn_raw);
4865 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
4866 rtx_insn *
4867 emit_jump_insn_before (rtx pattern, rtx before)
4869 return emit_pattern_before (pattern, before, true, false,
4870 make_jump_insn_raw);
4873 /* Like emit_call_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4874 rtx_insn *
4875 emit_call_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
4877 return emit_pattern_before_setloc (pattern, before, loc, false,
4878 make_call_insn_raw);
4881 /* Like emit_call_insn_before_noloc,
4882 but set insn_location according to BEFORE. */
4883 rtx_insn *
4884 emit_call_insn_before (rtx pattern, rtx_insn *before)
4886 return emit_pattern_before (pattern, before, true, false,
4887 make_call_insn_raw);
4890 /* Like emit_debug_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4891 rtx_insn *
4892 emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc)
4894 return emit_pattern_before_setloc (pattern, before, loc, false,
4895 make_debug_insn_raw);
4898 /* Like emit_debug_insn_before_noloc,
4899 but set insn_location according to BEFORE. */
4900 rtx_insn *
4901 emit_debug_insn_before (rtx pattern, rtx before)
4903 return emit_pattern_before (pattern, before, false, false,
4904 make_debug_insn_raw);
4907 /* Take X and emit it at the end of the doubly-linked
4908 INSN list.
4910 Returns the last insn emitted. */
4912 rtx_insn *
4913 emit_insn (rtx x)
4915 rtx_insn *last = get_last_insn ();
4916 rtx_insn *insn;
4918 if (x == NULL_RTX)
4919 return last;
4921 switch (GET_CODE (x))
4923 case DEBUG_INSN:
4924 case INSN:
4925 case JUMP_INSN:
4926 case CALL_INSN:
4927 case CODE_LABEL:
4928 case BARRIER:
4929 case NOTE:
4930 insn = as_a <rtx_insn *> (x);
4931 while (insn)
4933 rtx_insn *next = NEXT_INSN (insn);
4934 add_insn (insn);
4935 last = insn;
4936 insn = next;
4938 break;
4940 #ifdef ENABLE_RTL_CHECKING
4941 case JUMP_TABLE_DATA:
4942 case SEQUENCE:
4943 gcc_unreachable ();
4944 break;
4945 #endif
4947 default:
4948 last = make_insn_raw (x);
4949 add_insn (last);
4950 break;
4953 return last;
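/* Illustrative sketch (editorial example, not part of the original file):
   emit_insn accepts either a bare pattern, which is wrapped in a fresh
   INSN, or an already-made insn list, which is spliced onto the end of
   the chain.  */
static void
example_emit_patterns_and_list (rtx reg)
{
  rtx_insn *list;

  /* A bare pattern: emit_insn wraps it in a new INSN.  */
  emit_insn (gen_rtx_USE (VOIDmode, reg));

  /* An insn list: insns built in a sequence are detached and then
     spliced onto the end of the main chain in one call.  */
  start_sequence ();
  emit_insn (gen_rtx_CLOBBER (VOIDmode, reg));
  list = get_insns ();
  end_sequence ();
  emit_insn (list);
}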
4956 /* Make an insn of code DEBUG_INSN with pattern X
4957 and add it to the end of the doubly-linked list. */
4959 rtx_insn *
4960 emit_debug_insn (rtx x)
4962 rtx_insn *last = get_last_insn ();
4963 rtx_insn *insn;
4965 if (x == NULL_RTX)
4966 return last;
4968 switch (GET_CODE (x))
4970 case DEBUG_INSN:
4971 case INSN:
4972 case JUMP_INSN:
4973 case CALL_INSN:
4974 case CODE_LABEL:
4975 case BARRIER:
4976 case NOTE:
4977 insn = as_a <rtx_insn *> (x);
4978 while (insn)
4980 rtx_insn *next = NEXT_INSN (insn);
4981 add_insn (insn);
4982 last = insn;
4983 insn = next;
4985 break;
4987 #ifdef ENABLE_RTL_CHECKING
4988 case JUMP_TABLE_DATA:
4989 case SEQUENCE:
4990 gcc_unreachable ();
4991 break;
4992 #endif
4994 default:
4995 last = make_debug_insn_raw (x);
4996 add_insn (last);
4997 break;
5000 return last;
5003 /* Make an insn of code JUMP_INSN with pattern X
5004 and add it to the end of the doubly-linked list. */
5006 rtx_insn *
5007 emit_jump_insn (rtx x)
5009 rtx_insn *last = NULL;
5010 rtx_insn *insn;
5012 switch (GET_CODE (x))
5014 case DEBUG_INSN:
5015 case INSN:
5016 case JUMP_INSN:
5017 case CALL_INSN:
5018 case CODE_LABEL:
5019 case BARRIER:
5020 case NOTE:
5021 insn = as_a <rtx_insn *> (x);
5022 while (insn)
5024 rtx_insn *next = NEXT_INSN (insn);
5025 add_insn (insn);
5026 last = insn;
5027 insn = next;
5029 break;
5031 #ifdef ENABLE_RTL_CHECKING
5032 case JUMP_TABLE_DATA:
5033 case SEQUENCE:
5034 gcc_unreachable ();
5035 break;
5036 #endif
5038 default:
5039 last = make_jump_insn_raw (x);
5040 add_insn (last);
5041 break;
5044 return last;
5047 /* Make an insn of code CALL_INSN with pattern X
5048 and add it to the end of the doubly-linked list. */
5050 rtx_insn *
5051 emit_call_insn (rtx x)
5053 rtx_insn *insn;
5055 switch (GET_CODE (x))
5057 case DEBUG_INSN:
5058 case INSN:
5059 case JUMP_INSN:
5060 case CALL_INSN:
5061 case CODE_LABEL:
5062 case BARRIER:
5063 case NOTE:
5064 insn = emit_insn (x);
5065 break;
5067 #ifdef ENABLE_RTL_CHECKING
5068 case SEQUENCE:
5069 case JUMP_TABLE_DATA:
5070 gcc_unreachable ();
5071 break;
5072 #endif
5074 default:
5075 insn = make_call_insn_raw (x);
5076 add_insn (insn);
5077 break;
5080 return insn;
5083 /* Add the label LABEL to the end of the doubly-linked list. */
5085 rtx_insn *
5086 emit_label (rtx label)
5088 gcc_checking_assert (INSN_UID (label) == 0);
5089 INSN_UID (label) = cur_insn_uid++;
5090 add_insn (as_a <rtx_insn *> (label));
5091 return as_a <rtx_insn *> (label);
5094 /* Make an insn of code JUMP_TABLE_DATA
5095 and add it to the end of the doubly-linked list. */
5097 rtx_jump_table_data *
5098 emit_jump_table_data (rtx table)
5100 rtx_jump_table_data *jump_table_data =
5101 as_a <rtx_jump_table_data *> (rtx_alloc (JUMP_TABLE_DATA));
5102 INSN_UID (jump_table_data) = cur_insn_uid++;
5103 PATTERN (jump_table_data) = table;
5104 BLOCK_FOR_INSN (jump_table_data) = NULL;
5105 add_insn (jump_table_data);
5106 return jump_table_data;
5109 /* Make an insn of code BARRIER
5110 and add it to the end of the doubly-linked list. */
5112 rtx_barrier *
5113 emit_barrier (void)
5115 rtx_barrier *barrier = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
5116 INSN_UID (barrier) = cur_insn_uid++;
5117 add_insn (barrier);
5118 return barrier;
5121 /* Emit a copy of note ORIG. */
5123 rtx_note *
5124 emit_note_copy (rtx_note *orig)
5126 enum insn_note kind = (enum insn_note) NOTE_KIND (orig);
5127 rtx_note *note = make_note_raw (kind);
5128 NOTE_DATA (note) = NOTE_DATA (orig);
5129 add_insn (note);
5130 return note;
5133 /* Make an insn of code NOTE with kind KIND
5134 and add it to the end of the doubly-linked list. */
5136 rtx_note *
5137 emit_note (enum insn_note kind)
5139 rtx_note *note = make_note_raw (kind);
5140 add_insn (note);
5141 return note;
5144 /* Emit a clobber of lvalue X. */
5146 rtx_insn *
5147 emit_clobber (rtx x)
5149 /* CONCATs should not appear in the insn stream. */
5150 if (GET_CODE (x) == CONCAT)
5152 emit_clobber (XEXP (x, 0));
5153 return emit_clobber (XEXP (x, 1));
5155 return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
5158 /* Return a sequence of insns to clobber lvalue X. */
5160 rtx_insn *
5161 gen_clobber (rtx x)
5163 rtx_insn *seq;
5165 start_sequence ();
5166 emit_clobber (x);
5167 seq = get_insns ();
5168 end_sequence ();
5169 return seq;
5172 /* Emit a use of rvalue X. */
5174 rtx_insn *
5175 emit_use (rtx x)
5177 /* CONCATs should not appear in the insn stream. */
5178 if (GET_CODE (x) == CONCAT)
5180 emit_use (XEXP (x, 0));
5181 return emit_use (XEXP (x, 1));
5183 return emit_insn (gen_rtx_USE (VOIDmode, x));
5186 /* Return a sequence of insns to use rvalue X. */
5188 rtx_insn *
5189 gen_use (rtx x)
5191 rtx_insn *seq;
5193 start_sequence ();
5194 emit_use (x);
5195 seq = get_insns ();
5196 end_sequence ();
5197 return seq;
5200 /* Notes like REG_EQUAL and REG_EQUIV refer to a set in an instruction.
5201 Return the set in INSN that such notes describe, or NULL if the notes
5202 have no meaning for INSN. */
5205 set_for_reg_notes (rtx insn)
5207 rtx pat, reg;
5209 if (!INSN_P (insn))
5210 return NULL_RTX;
5212 pat = PATTERN (insn);
5213 if (GET_CODE (pat) == PARALLEL)
5215 /* We do not use single_set because that ignores SETs of unused
5216 registers. REG_EQUAL and REG_EQUIV notes really do require the
5217 PARALLEL to have a single SET. */
5218 if (multiple_sets (insn))
5219 return NULL_RTX;
5220 pat = XVECEXP (pat, 0, 0);
5223 if (GET_CODE (pat) != SET)
5224 return NULL_RTX;
5226 reg = SET_DEST (pat);
5228 /* Notes apply to the contents of a STRICT_LOW_PART. */
5229 if (GET_CODE (reg) == STRICT_LOW_PART)
5230 reg = XEXP (reg, 0);
5232 /* Check that we have a register. */
5233 if (!(REG_P (reg) || GET_CODE (reg) == SUBREG))
5234 return NULL_RTX;
5236 return pat;
5239 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
5240 note of this type already exists, remove it first. */
5243 set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
5245 rtx note = find_reg_note (insn, kind, NULL_RTX);
5247 switch (kind)
5249 case REG_EQUAL:
5250 case REG_EQUIV:
5251 if (!set_for_reg_notes (insn))
5252 return NULL_RTX;
5254 /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
5255 It serves no useful purpose and breaks eliminate_regs. */
5256 if (GET_CODE (datum) == ASM_OPERANDS)
5257 return NULL_RTX;
5259 /* Notes with side effects are dangerous. Even if the side-effect
5260 initially mirrors one in PATTERN (INSN), later optimizations
5261 might alter the way that the final register value is calculated
5262 and so move or alter the side-effect in some way. The note would
5263 then no longer be a valid substitution for SET_SRC. */
5264 if (side_effects_p (datum))
5265 return NULL_RTX;
5266 break;
5268 default:
5269 break;
5272 if (note)
5273 XEXP (note, 0) = datum;
5274 else
5276 add_reg_note (insn, kind, datum);
5277 note = REG_NOTES (insn);
5280 switch (kind)
5282 case REG_EQUAL:
5283 case REG_EQUIV:
5284 df_notes_rescan (as_a <rtx_insn *> (insn));
5285 break;
5286 default:
5287 break;
5290 return note;
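/* Illustrative sketch (editorial example, not part of the original file):
   record that the destination of a freshly emitted move is known to equal
   VALUE, so later passes can exploit the REG_EQUAL note.
   set_unique_reg_note itself rejects the cases (multiple sets, ASM_OPERANDS,
   side effects) where such a note would be meaningless.  */
static void
example_note_known_value (rtx dest, rtx src, rtx value)
{
  rtx_insn *insn = emit_move_insn (dest, src);
  set_unique_reg_note (insn, REG_EQUAL, value);
}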
5293 /* Like set_unique_reg_note, but don't do anything unless INSN sets DST. */
5295 set_dst_reg_note (rtx insn, enum reg_note kind, rtx datum, rtx dst)
5297 rtx set = set_for_reg_notes (insn);
5299 if (set && SET_DEST (set) == dst)
5300 return set_unique_reg_note (insn, kind, datum);
5301 return NULL_RTX;
5304 /* Return an indication of which type of insn should have X as a body.
5305 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
5307 static enum rtx_code
5308 classify_insn (rtx x)
5310 if (LABEL_P (x))
5311 return CODE_LABEL;
5312 if (GET_CODE (x) == CALL)
5313 return CALL_INSN;
5314 if (ANY_RETURN_P (x))
5315 return JUMP_INSN;
5316 if (GET_CODE (x) == SET)
5318 if (SET_DEST (x) == pc_rtx)
5319 return JUMP_INSN;
5320 else if (GET_CODE (SET_SRC (x)) == CALL)
5321 return CALL_INSN;
5322 else
5323 return INSN;
5325 if (GET_CODE (x) == PARALLEL)
5327 int j;
5328 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
5329 if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
5330 return CALL_INSN;
5331 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
5332 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
5333 return JUMP_INSN;
5334 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
5335 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
5336 return CALL_INSN;
5338 return INSN;
5341 /* Emit the rtl pattern X as an appropriate kind of insn.
5342 If X is a label, it is simply added into the insn chain. */
5344 rtx_insn *
5345 emit (rtx x)
5347 enum rtx_code code = classify_insn (x);
5349 switch (code)
5351 case CODE_LABEL:
5352 return emit_label (x);
5353 case INSN:
5354 return emit_insn (x);
5355 case JUMP_INSN:
5357 rtx_insn *insn = emit_jump_insn (x);
5358 if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
5359 return emit_barrier ();
5360 return insn;
5362 case CALL_INSN:
5363 return emit_call_insn (x);
5364 case DEBUG_INSN:
5365 return emit_debug_insn (x);
5366 default:
5367 gcc_unreachable ();
5371 /* Space for free sequence stack entries. */
5372 static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
5374 /* Begin emitting insns to a sequence. If this sequence will contain
5375 something that might cause the compiler to pop arguments to function
5376 calls (because those pops have previously been deferred; see
5377 INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
5378 before calling this function. That will ensure that the deferred
5379 pops are not accidentally emitted in the middle of this sequence. */
5381 void
5382 start_sequence (void)
5384 struct sequence_stack *tem;
5386 if (free_sequence_stack != NULL)
5388 tem = free_sequence_stack;
5389 free_sequence_stack = tem->next;
5391 else
5392 tem = ggc_alloc<sequence_stack> ();
5394 tem->next = seq_stack;
5395 tem->first = get_insns ();
5396 tem->last = get_last_insn ();
5398 seq_stack = tem;
5400 set_first_insn (0);
5401 set_last_insn (0);
5404 /* Set up the insn chain starting with FIRST as the current sequence,
5405 saving the previously current one. See the documentation for
5406 start_sequence for more information about how to use this function. */
5408 void
5409 push_to_sequence (rtx_insn *first)
5411 rtx_insn *last;
5413 start_sequence ();
5415 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last))
5418 set_first_insn (first);
5419 set_last_insn (last);
5422 /* Like push_to_sequence, but take the last insn as an argument to avoid
5423 looping through the list. */
5425 void
5426 push_to_sequence2 (rtx_insn *first, rtx_insn *last)
5428 start_sequence ();
5430 set_first_insn (first);
5431 set_last_insn (last);
5434 /* Set up the outer-level insn chain
5435 as the current sequence, saving the previously current one. */
5437 void
5438 push_topmost_sequence (void)
5440 struct sequence_stack *stack, *top = NULL;
5442 start_sequence ();
5444 for (stack = seq_stack; stack; stack = stack->next)
5445 top = stack;
5447 set_first_insn (top->first);
5448 set_last_insn (top->last);
5451 /* After emitting to the outer-level insn chain, update the outer-level
5452 insn chain, and restore the previous saved state. */
5454 void
5455 pop_topmost_sequence (void)
5457 struct sequence_stack *stack, *top = NULL;
5459 for (stack = seq_stack; stack; stack = stack->next)
5460 top = stack;
5462 top->first = get_insns ();
5463 top->last = get_last_insn ();
5465 end_sequence ();
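/* Illustrative sketch (editorial example, not part of the original file):
   emit PATTERN into the function's outermost insn chain even while nested
   sequences are in progress, using the push/pop pair above.  */
static void
example_emit_to_outer_chain (rtx pattern)
{
  push_topmost_sequence ();
  emit_insn (pattern);
  pop_topmost_sequence ();
}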
5468 /* After emitting to a sequence, restore previous saved state.
5470 To get the contents of the sequence just made, you must call
5471 `get_insns' *before* calling here.
5473 If the compiler might have deferred popping arguments while
5474 generating this sequence, and this sequence will not be immediately
5475 inserted into the instruction stream, use do_pending_stack_adjust
5476 before calling get_insns. That will ensure that the deferred
5477 pops are inserted into this sequence, and not into some random
5478 location in the instruction stream. See INHIBIT_DEFER_POP for more
5479 information about deferred popping of arguments. */
5481 void
5482 end_sequence (void)
5484 struct sequence_stack *tem = seq_stack;
5486 set_first_insn (tem->first);
5487 set_last_insn (tem->last);
5488 seq_stack = tem->next;
5490 memset (tem, 0, sizeof (*tem));
5491 tem->next = free_sequence_stack;
5492 free_sequence_stack = tem;
5495 /* Return 1 if currently emitting into a sequence. */
5498 in_sequence_p (void)
5500 return seq_stack != 0;
5503 /* Put the various virtual registers into REGNO_REG_RTX. */
5505 static void
5506 init_virtual_regs (void)
5508 regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
5509 regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
5510 regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
5511 regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
5512 regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
5513 regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
5514 = virtual_preferred_stack_boundary_rtx;
5518 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
5519 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
5520 static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
5521 static int copy_insn_n_scratches;
5523 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5524 copied an ASM_OPERANDS.
5525 In that case, it is the original input-operand vector. */
5526 static rtvec orig_asm_operands_vector;
5528 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5529 copied an ASM_OPERANDS.
5530 In that case, it is the copied input-operand vector. */
5531 static rtvec copy_asm_operands_vector;
5533 /* Likewise for the constraints vector. */
5534 static rtvec orig_asm_constraints_vector;
5535 static rtvec copy_asm_constraints_vector;
5537 /* Recursively create a new copy of an rtx for copy_insn.
5538 This function differs from copy_rtx in that it handles SCRATCHes and
5539 ASM_OPERANDs properly.
5540 Normally, this function is not used directly; use copy_insn as front end.
5541 However, you could first copy an insn pattern with copy_insn and then use
5542 this function afterwards to properly copy any REG_NOTEs containing
5543 SCRATCHes. */
5546 copy_insn_1 (rtx orig)
5548 rtx copy;
5549 int i, j;
5550 RTX_CODE code;
5551 const char *format_ptr;
5553 if (orig == NULL)
5554 return NULL;
5556 code = GET_CODE (orig);
5558 switch (code)
5560 case REG:
5561 case DEBUG_EXPR:
5562 CASE_CONST_ANY:
5563 case SYMBOL_REF:
5564 case CODE_LABEL:
5565 case PC:
5566 case CC0:
5567 case RETURN:
5568 case SIMPLE_RETURN:
5569 return orig;
5570 case CLOBBER:
5571 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
5572 clobbers or clobbers of hard registers that originated as pseudos.
5573 This is needed to allow safe register renaming. */
5574 if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER
5575 && ORIGINAL_REGNO (XEXP (orig, 0)) == REGNO (XEXP (orig, 0)))
5576 return orig;
5577 break;
5579 case SCRATCH:
5580 for (i = 0; i < copy_insn_n_scratches; i++)
5581 if (copy_insn_scratch_in[i] == orig)
5582 return copy_insn_scratch_out[i];
5583 break;
5585 case CONST:
5586 if (shared_const_p (orig))
5587 return orig;
5588 break;
5590 /* A MEM with a constant address is not sharable. The problem is that
5591 the constant address may need to be reloaded. If the mem is shared,
5592 then reloading one copy of this mem will cause all copies to appear
5593 to have been reloaded. */
5595 default:
5596 break;
5599 /* Copy the various flags, fields, and other information. We assume
5600 that all fields need copying, and then clear the fields that should
5601 not be copied. That is the sensible default behavior, and forces
5602 us to explicitly document why we are *not* copying a flag. */
5603 copy = shallow_copy_rtx (orig);
5605 /* We do not copy the USED flag, which is used as a mark bit during
5606 walks over the RTL. */
5607 RTX_FLAG (copy, used) = 0;
5609 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
5610 if (INSN_P (orig))
5612 RTX_FLAG (copy, jump) = 0;
5613 RTX_FLAG (copy, call) = 0;
5614 RTX_FLAG (copy, frame_related) = 0;
5617 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
5619 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
5620 switch (*format_ptr++)
5622 case 'e':
5623 if (XEXP (orig, i) != NULL)
5624 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
5625 break;
5627 case 'E':
5628 case 'V':
5629 if (XVEC (orig, i) == orig_asm_constraints_vector)
5630 XVEC (copy, i) = copy_asm_constraints_vector;
5631 else if (XVEC (orig, i) == orig_asm_operands_vector)
5632 XVEC (copy, i) = copy_asm_operands_vector;
5633 else if (XVEC (orig, i) != NULL)
5635 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
5636 for (j = 0; j < XVECLEN (copy, i); j++)
5637 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
5639 break;
5641 case 't':
5642 case 'w':
5643 case 'i':
5644 case 's':
5645 case 'S':
5646 case 'u':
5647 case '0':
5648 /* These are left unchanged. */
5649 break;
5651 default:
5652 gcc_unreachable ();
5655 if (code == SCRATCH)
5657 i = copy_insn_n_scratches++;
5658 gcc_assert (i < MAX_RECOG_OPERANDS);
5659 copy_insn_scratch_in[i] = orig;
5660 copy_insn_scratch_out[i] = copy;
5662 else if (code == ASM_OPERANDS)
5664 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
5665 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
5666 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
5667 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
5670 return copy;
5673 /* Create a new copy of an rtx.
5674 This function differs from copy_rtx in that it handles SCRATCHes and
5675 ASM_OPERANDs properly.
5676 INSN doesn't really have to be a full INSN; it could be just the
5677 pattern. */
5679 copy_insn (rtx insn)
5681 copy_insn_n_scratches = 0;
5682 orig_asm_operands_vector = 0;
5683 orig_asm_constraints_vector = 0;
5684 copy_asm_operands_vector = 0;
5685 copy_asm_constraints_vector = 0;
5686 return copy_insn_1 (insn);
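/* Illustrative sketch (editorial example, not part of the original file)
   of the two-step use described above copy_insn_1: copy an insn's pattern
   with copy_insn (which resets the SCRATCH/ASM_OPERANDS bookkeeping) and
   then copy its REG_NOTES with copy_insn_1, so that SCRATCHes stay shared
   between the copied pattern and the copied notes.  */
static void
example_copy_pattern_and_notes (rtx_insn *insn, rtx *pat_copy, rtx *notes_copy)
{
  *pat_copy = copy_insn (PATTERN (insn));
  *notes_copy = copy_insn_1 (REG_NOTES (insn));
}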
5689 /* Return a copy of INSN that can be used in a SEQUENCE delay slot,
5690    on the assumption that INSN itself remains in its original place. */
5692 rtx_insn *
5693 copy_delay_slot_insn (rtx_insn *insn)
5695 /* Copy INSN with its rtx_code, all its notes, location etc. */
5696 insn = as_a <rtx_insn *> (copy_rtx (insn));
5697 INSN_UID (insn) = cur_insn_uid++;
5698 return insn;
5701 /* Initialize data structures and variables in this file
5702 before generating rtl for each function. */
5704 void
5705 init_emit (void)
5707 set_first_insn (NULL);
5708 set_last_insn (NULL);
5709 if (MIN_NONDEBUG_INSN_UID)
5710 cur_insn_uid = MIN_NONDEBUG_INSN_UID;
5711 else
5712 cur_insn_uid = 1;
5713 cur_debug_insn_uid = 1;
5714 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
5715 first_label_num = label_num;
5716 seq_stack = NULL;
5718 /* Init the tables that describe all the pseudo regs. */
5720 crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
5722 crtl->emit.regno_pointer_align
5723 = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);
5725 regno_reg_rtx = ggc_vec_alloc<rtx> (crtl->emit.regno_pointer_align_length);
5727 /* Put copies of all the hard registers into regno_reg_rtx. */
5728 memcpy (regno_reg_rtx,
5729 initial_regno_reg_rtx,
5730 FIRST_PSEUDO_REGISTER * sizeof (rtx));
5732 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
5733 init_virtual_regs ();
5735 /* Indicate that the virtual registers and stack locations are
5736 all pointers. */
5737 REG_POINTER (stack_pointer_rtx) = 1;
5738 REG_POINTER (frame_pointer_rtx) = 1;
5739 REG_POINTER (hard_frame_pointer_rtx) = 1;
5740 REG_POINTER (arg_pointer_rtx) = 1;
5742 REG_POINTER (virtual_incoming_args_rtx) = 1;
5743 REG_POINTER (virtual_stack_vars_rtx) = 1;
5744 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5745 REG_POINTER (virtual_outgoing_args_rtx) = 1;
5746 REG_POINTER (virtual_cfa_rtx) = 1;
5748 #ifdef STACK_BOUNDARY
5749 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5750 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5751 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5752 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5754 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5755 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5756 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5757 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5758 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
5759 #endif
5761 #ifdef INIT_EXPANDERS
5762 INIT_EXPANDERS;
5763 #endif
5766 /* Generate a vector constant for mode MODE and constant value CONSTANT. */
5768 static rtx
5769 gen_const_vector (machine_mode mode, int constant)
5771 rtx tem;
5772 rtvec v;
5773 int units, i;
5774 machine_mode inner;
5776 units = GET_MODE_NUNITS (mode);
5777 inner = GET_MODE_INNER (mode);
5779 gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));
5781 v = rtvec_alloc (units);
5783 /* We need to call this function after we set the scalar const_tiny_rtx
5784 entries. */
5785 gcc_assert (const_tiny_rtx[constant][(int) inner]);
5787 for (i = 0; i < units; ++i)
5788 RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];
5790 tem = gen_rtx_raw_CONST_VECTOR (mode, v);
5791 return tem;
5794 /* Generate a vector like gen_rtx_raw_CONST_VEC, but use the zero vector when
5795 all elements are zero, and the one vector when all elements are one. */
5797 gen_rtx_CONST_VECTOR (machine_mode mode, rtvec v)
5799 machine_mode inner = GET_MODE_INNER (mode);
5800 int nunits = GET_MODE_NUNITS (mode);
5801 rtx x;
5802 int i;
5804 /* Check to see if all of the elements have the same value. */
5805 x = RTVEC_ELT (v, nunits - 1);
5806 for (i = nunits - 2; i >= 0; i--)
5807 if (RTVEC_ELT (v, i) != x)
5808 break;
5810 /* If the values are all the same, check to see if we can use one of the
5811 standard constant vectors. */
5812 if (i == -1)
5814 if (x == CONST0_RTX (inner))
5815 return CONST0_RTX (mode);
5816 else if (x == CONST1_RTX (inner))
5817 return CONST1_RTX (mode);
5818 else if (x == CONSTM1_RTX (inner))
5819 return CONSTM1_RTX (mode);
5822 return gen_rtx_raw_CONST_VECTOR (mode, v);
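/* Illustrative sketch (editorial example, not part of the original file):
   build an all-zeros constant vector; because every element is the shared
   zero constant, the canonical CONST0_RTX of the vector mode is returned
   instead of a fresh CONST_VECTOR.  */
static rtx
example_build_zero_vector (machine_mode vmode)
{
  int i, nunits = GET_MODE_NUNITS (vmode);
  rtvec v = rtvec_alloc (nunits);

  for (i = 0; i < nunits; i++)
    RTVEC_ELT (v, i) = CONST0_RTX (GET_MODE_INNER (vmode));

  return gen_rtx_CONST_VECTOR (vmode, v);	/* == CONST0_RTX (vmode)  */
}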
5825 /* Initialize global register information required by all functions. */
5827 void
5828 init_emit_regs (void)
5830 int i;
5831 machine_mode mode;
5832 mem_attrs *attrs;
5834 /* Reset register attributes */
5835 reg_attrs_htab->empty ();
5837 /* We need reg_raw_mode, so initialize the modes now. */
5838 init_reg_modes_target ();
5840 /* Assign register numbers to the globally defined register rtx. */
5841 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
5842 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
5843 hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
5844 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
5845 virtual_incoming_args_rtx =
5846 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
5847 virtual_stack_vars_rtx =
5848 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
5849 virtual_stack_dynamic_rtx =
5850 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
5851 virtual_outgoing_args_rtx =
5852 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
5853 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
5854 virtual_preferred_stack_boundary_rtx =
5855 gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);
5857 /* Initialize RTL for commonly used hard registers. These are
5858 copied into regno_reg_rtx as we begin to compile each function. */
5859 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5860 initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
5862 #ifdef RETURN_ADDRESS_POINTER_REGNUM
5863 return_address_pointer_rtx
5864 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
5865 #endif
5867 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
5868 pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
5869 else
5870 pic_offset_table_rtx = NULL_RTX;
5872 for (i = 0; i < (int) MAX_MACHINE_MODE; i++)
5874 mode = (machine_mode) i;
5875 attrs = ggc_cleared_alloc<mem_attrs> ();
5876 attrs->align = BITS_PER_UNIT;
5877 attrs->addrspace = ADDR_SPACE_GENERIC;
5878 if (mode != BLKmode)
5880 attrs->size_known_p = true;
5881 attrs->size = GET_MODE_SIZE (mode);
5882 if (STRICT_ALIGNMENT)
5883 attrs->align = GET_MODE_ALIGNMENT (mode);
5885 mode_mem_attrs[i] = attrs;
5889 /* Initialize global machine_mode variables. */
5891 void
5892 init_derived_machine_modes (void)
5894 byte_mode = VOIDmode;
5895 word_mode = VOIDmode;
5897 for (machine_mode mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5898 mode != VOIDmode;
5899 mode = GET_MODE_WIDER_MODE (mode))
5901 if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
5902 && byte_mode == VOIDmode)
5903 byte_mode = mode;
5905 if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
5906 && word_mode == VOIDmode)
5907 word_mode = mode;
5910 ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
5913 /* Create some permanent unique rtl objects shared between all functions. */
5915 void
5916 init_emit_once (void)
5918 int i;
5919 machine_mode mode;
5920 machine_mode double_mode;
5922 /* Initialize the CONST_INT, CONST_WIDE_INT, CONST_DOUBLE,
5923 CONST_FIXED, and memory attribute hash tables. */
5924 const_int_htab = hash_table<const_int_hasher>::create_ggc (37);
5926 #if TARGET_SUPPORTS_WIDE_INT
5927 const_wide_int_htab = hash_table<const_wide_int_hasher>::create_ggc (37);
5928 #endif
5929 const_double_htab = hash_table<const_double_hasher>::create_ggc (37);
5931 const_fixed_htab = hash_table<const_fixed_hasher>::create_ggc (37);
5933 reg_attrs_htab = hash_table<reg_attr_hasher>::create_ggc (37);
5935 #ifdef INIT_EXPANDERS
5936 /* This is to initialize {init|mark|free}_machine_status before the first
5937 call to push_function_context_to. This is needed by the Chill front
5938 end which calls push_function_context_to before the first call to
5939 init_function_start. */
5940 INIT_EXPANDERS;
5941 #endif
5943 /* Create the unique rtx's for certain rtx codes and operand values. */
5945 /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
5946 tries to use these variables. */
5947 for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
5948 const_int_rtx[i + MAX_SAVED_CONST_INT] =
5949 gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);
5951 if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
5952 && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
5953 const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
5954 else
5955 const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
5957 double_mode = mode_for_size (DOUBLE_TYPE_SIZE, MODE_FLOAT, 0);
5959 real_from_integer (&dconst0, double_mode, 0, SIGNED);
5960 real_from_integer (&dconst1, double_mode, 1, SIGNED);
5961 real_from_integer (&dconst2, double_mode, 2, SIGNED);
5963 dconstm1 = dconst1;
5964 dconstm1.sign = 1;
5966 dconsthalf = dconst1;
5967 SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);
5969 for (i = 0; i < 3; i++)
5971 const REAL_VALUE_TYPE *const r =
5972 (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
5974 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
5975 mode != VOIDmode;
5976 mode = GET_MODE_WIDER_MODE (mode))
5977 const_tiny_rtx[i][(int) mode] =
5978 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5980 for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT);
5981 mode != VOIDmode;
5982 mode = GET_MODE_WIDER_MODE (mode))
5983 const_tiny_rtx[i][(int) mode] =
5984 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5986 const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
5988 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5989 mode != VOIDmode;
5990 mode = GET_MODE_WIDER_MODE (mode))
5991 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5993 for (mode = MIN_MODE_PARTIAL_INT;
5994 mode <= MAX_MODE_PARTIAL_INT;
5995 mode = (machine_mode)((int)(mode) + 1))
5996 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5999 const_tiny_rtx[3][(int) VOIDmode] = constm1_rtx;
6001 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
6002 mode != VOIDmode;
6003 mode = GET_MODE_WIDER_MODE (mode))
6004 const_tiny_rtx[3][(int) mode] = constm1_rtx;
6006 for (mode = MIN_MODE_PARTIAL_INT;
6007 mode <= MAX_MODE_PARTIAL_INT;
6008 mode = (machine_mode)((int)(mode) + 1))
6009 const_tiny_rtx[3][(int) mode] = constm1_rtx;
6011 for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_INT);
6012 mode != VOIDmode;
6013 mode = GET_MODE_WIDER_MODE (mode))
6015 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
6016 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
6019 for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_FLOAT);
6020 mode != VOIDmode;
6021 mode = GET_MODE_WIDER_MODE (mode))
6023 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
6024 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
6027 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
6028 mode != VOIDmode;
6029 mode = GET_MODE_WIDER_MODE (mode))
6031 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6032 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6033 const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
6036 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
6037 mode != VOIDmode;
6038 mode = GET_MODE_WIDER_MODE (mode))
6040 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6041 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6044 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FRACT);
6045 mode != VOIDmode;
6046 mode = GET_MODE_WIDER_MODE (mode))
6048 FCONST0 (mode).data.high = 0;
6049 FCONST0 (mode).data.low = 0;
6050 FCONST0 (mode).mode = mode;
6051 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6052 FCONST0 (mode), mode);
6055 for (mode = GET_CLASS_NARROWEST_MODE (MODE_UFRACT);
6056 mode != VOIDmode;
6057 mode = GET_MODE_WIDER_MODE (mode))
6059 FCONST0 (mode).data.high = 0;
6060 FCONST0 (mode).data.low = 0;
6061 FCONST0 (mode).mode = mode;
6062 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6063 FCONST0 (mode), mode);
6066 for (mode = GET_CLASS_NARROWEST_MODE (MODE_ACCUM);
6067 mode != VOIDmode;
6068 mode = GET_MODE_WIDER_MODE (mode))
6070 FCONST0 (mode).data.high = 0;
6071 FCONST0 (mode).data.low = 0;
6072 FCONST0 (mode).mode = mode;
6073 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6074 FCONST0 (mode), mode);
6076 /* We store the value 1. */
6077 FCONST1 (mode).data.high = 0;
6078 FCONST1 (mode).data.low = 0;
6079 FCONST1 (mode).mode = mode;
6080 FCONST1 (mode).data
6081 = double_int_one.lshift (GET_MODE_FBIT (mode),
6082 HOST_BITS_PER_DOUBLE_INT,
6083 SIGNED_FIXED_POINT_MODE_P (mode));
6084 const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6085 FCONST1 (mode), mode);
6088 for (mode = GET_CLASS_NARROWEST_MODE (MODE_UACCUM);
6089 mode != VOIDmode;
6090 mode = GET_MODE_WIDER_MODE (mode))
6092 FCONST0 (mode).data.high = 0;
6093 FCONST0 (mode).data.low = 0;
6094 FCONST0 (mode).mode = mode;
6095 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6096 FCONST0 (mode), mode);
6098 /* We store the value 1. */
6099 FCONST1 (mode).data.high = 0;
6100 FCONST1 (mode).data.low = 0;
6101 FCONST1 (mode).mode = mode;
6102 FCONST1 (mode).data
6103 = double_int_one.lshift (GET_MODE_FBIT (mode),
6104 HOST_BITS_PER_DOUBLE_INT,
6105 SIGNED_FIXED_POINT_MODE_P (mode));
6106 const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6107 FCONST1 (mode), mode);
6110 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FRACT);
6111 mode != VOIDmode;
6112 mode = GET_MODE_WIDER_MODE (mode))
6114 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6117 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UFRACT);
6118 mode != VOIDmode;
6119 mode = GET_MODE_WIDER_MODE (mode))
6121 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6124 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_ACCUM);
6125 mode != VOIDmode;
6126 mode = GET_MODE_WIDER_MODE (mode))
6128 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6129 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6132 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UACCUM);
6133 mode != VOIDmode;
6134 mode = GET_MODE_WIDER_MODE (mode))
6136 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6137 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6140 for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
6141 if (GET_MODE_CLASS ((machine_mode) i) == MODE_CC)
6142 const_tiny_rtx[0][i] = const0_rtx;
6144 const_tiny_rtx[0][(int) BImode] = const0_rtx;
6145 if (STORE_FLAG_VALUE == 1)
6146 const_tiny_rtx[1][(int) BImode] = const1_rtx;
6148 for (mode = GET_CLASS_NARROWEST_MODE (MODE_POINTER_BOUNDS);
6149 mode != VOIDmode;
6150 mode = GET_MODE_WIDER_MODE (mode))
6152 wide_int wi_zero = wi::zero (GET_MODE_PRECISION (mode));
6153 const_tiny_rtx[0][mode] = immed_wide_int_const (wi_zero, mode);
6156 pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
6157 ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
6158 simple_return_rtx = gen_rtx_fmt_ (SIMPLE_RETURN, VOIDmode);
6159 cc0_rtx = gen_rtx_fmt_ (CC0, VOIDmode);
6162 /* Produce an exact duplicate of insn INSN after AFTER,
6163    taking care to update any libcall regions if present. */
6165 rtx_insn *
6166 emit_copy_of_insn_after (rtx_insn *insn, rtx_insn *after)
6168 rtx_insn *new_rtx;
6169 rtx link;
6171 switch (GET_CODE (insn))
6173 case INSN:
6174 new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
6175 break;
6177 case JUMP_INSN:
6178 new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
6179 CROSSING_JUMP_P (new_rtx) = CROSSING_JUMP_P (insn);
6180 break;
6182 case DEBUG_INSN:
6183 new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
6184 break;
6186 case CALL_INSN:
6187 new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
6188 if (CALL_INSN_FUNCTION_USAGE (insn))
6189 CALL_INSN_FUNCTION_USAGE (new_rtx)
6190 = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
6191 SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
6192 RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
6193 RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
6194 RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
6195 = RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
6196 break;
6198 default:
6199 gcc_unreachable ();
6202 /* Update LABEL_NUSES. */
6203 mark_jump_label (PATTERN (new_rtx), new_rtx, 0);
6205 INSN_LOCATION (new_rtx) = INSN_LOCATION (insn);
6207 /* If the old insn is frame related, then so is the new one. This is
6208    primarily needed for IA-64 unwind info, which marks epilogue insns
6209    that may be duplicated by the basic block reordering code.
6210 RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);
6212 /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
6213 will make them. REG_LABEL_TARGETs are created there too, but are
6214 supposed to be sticky, so we copy them. */
6215 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
6216 if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
6218 if (GET_CODE (link) == EXPR_LIST)
6219 add_reg_note (new_rtx, REG_NOTE_KIND (link),
6220 copy_insn_1 (XEXP (link, 0)));
6221 else
6222 add_shallow_copy_of_reg_note (new_rtx, link);
6225 INSN_CODE (new_rtx) = INSN_CODE (insn);
6226 return new_rtx;
6229 static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
6231 gen_hard_reg_clobber (machine_mode mode, unsigned int regno)
6233 if (hard_reg_clobbers[mode][regno])
6234 return hard_reg_clobbers[mode][regno];
6235 else
6236 return (hard_reg_clobbers[mode][regno] =
6237 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
6240 location_t prologue_location;
6241 location_t epilogue_location;
6243 /* Hold the current and last location information, so that the data
6244    structures are built lazily only when instructions at a given place
6245    are actually needed. */
6246 static location_t curr_location;
6248 /* Allocate insn location datastructure. */
6249 void
6250 insn_locations_init (void)
6252 prologue_location = epilogue_location = 0;
6253 curr_location = UNKNOWN_LOCATION;
6256 /* At the end of emit stage, clear current location. */
6257 void
6258 insn_locations_finalize (void)
6260 epilogue_location = curr_location;
6261 curr_location = UNKNOWN_LOCATION;
6264 /* Set current location. */
6265 void
6266 set_curr_insn_location (location_t location)
6268 curr_location = location;
6271 /* Get current location. */
6272 location_t
6273 curr_insn_location (void)
6275 return curr_location;
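/* Illustrative sketch (editorial example, not part of the original file):
   during expansion, the location of the statement being expanded is
   installed with set_curr_insn_location, so that insns created afterwards
   (via make_insn_raw and friends) pick it up as their INSN_LOCATION.  */
static void
example_expand_with_location (location_t loc, rtx dest, rtx src)
{
  set_curr_insn_location (loc);
  emit_move_insn (dest, src);
}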
6278 /* Return lexical scope block insn belongs to. */
6279 tree
6280 insn_scope (const rtx_insn *insn)
6282 return LOCATION_BLOCK (INSN_LOCATION (insn));
6285 /* Return line number of the statement that produced this insn. */
6287 insn_line (const rtx_insn *insn)
6289 return LOCATION_LINE (INSN_LOCATION (insn));
6292 /* Return source file of the statement that produced this insn. */
6293 const char *
6294 insn_file (const rtx_insn *insn)
6296 return LOCATION_FILE (INSN_LOCATION (insn));
6299 /* Return expanded location of the statement that produced this insn. */
6300 expanded_location
6301 insn_location (const rtx_insn *insn)
6303 return expand_location (INSN_LOCATION (insn));
6306 /* Return true if memory model MODEL requires a pre-operation (release-style)
6307 barrier or a post-operation (acquire-style) barrier. While not universal,
6308 this function matches behavior of several targets. */
6310 bool
6311 need_atomic_barrier_p (enum memmodel model, bool pre)
6313 switch (model & MEMMODEL_MASK)
6315 case MEMMODEL_RELAXED:
6316 case MEMMODEL_CONSUME:
6317 return false;
6318 case MEMMODEL_RELEASE:
6319 return pre;
6320 case MEMMODEL_ACQUIRE:
6321 return !pre;
6322 case MEMMODEL_ACQ_REL:
6323 case MEMMODEL_SEQ_CST:
6324 return true;
6325 default:
6326 gcc_unreachable ();
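/* Illustrative sketch (editorial example, not part of the original file):
   how a target expander might consult need_atomic_barrier_p when lowering
   an atomic store.  GEN_FENCE is a hypothetical stand-in for the target's
   memory-barrier pattern generator.  */
static void
example_expand_atomic_store (rtx mem, rtx val, enum memmodel model,
			     rtx (*gen_fence) (void))
{
  if (need_atomic_barrier_p (model, true))
    emit_insn (gen_fence ());		/* pre-op (release-style) barrier */
  emit_move_insn (mem, val);
  if (need_atomic_barrier_p (model, false))
    emit_insn (gen_fence ());		/* post-op (acquire-style) barrier */
}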
6330 #include "gt-emit-rtl.h"