1 /* Emit RTL for the GNU C-Compiler expander.
2 Copyright (C) 1987, 88, 92-97, 1998, 1999 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
22 /* Middle-to-low level generation of rtx code and insns.
24 This file contains the functions `gen_rtx', `gen_reg_rtx'
25 and `gen_label_rtx' that are the usual ways of creating rtl
26 expressions for most purposes.
28 It also has the functions for creating insns and linking
29 them in the doubly-linked chain.
31 The patterns of the insns are created by machine-dependent
32 routines in insn-emit.c, which is generated automatically from
33 the machine description. These routines use `gen_rtx' to make
34 the individual rtx's of the pattern; what is machine dependent
35 is the kind of rtx's they make and what arguments they use. */
47 #include "hard-reg-set.h"
48 #include "insn-config.h"
53 #include "basic-block.h"
/* NOTE(review): garbled extraction of emit-rtl.c file-scope state.  Original
   line numbers are fused into the text and some lines are missing; the code
   below is kept byte-identical.  Recover the clean text from upstream GCC
   rather than editing this fragment.  */
56 /* Commonly used modes.  */
58 enum machine_mode byte_mode
; /* Mode whose width is BITS_PER_UNIT. */
59 enum machine_mode word_mode
; /* Mode whose width is BITS_PER_WORD. */
60 enum machine_mode double_mode
; /* Mode whose width is DOUBLE_TYPE_SIZE. */
61 enum machine_mode ptr_mode
; /* Mode whose width is POINTER_SIZE. */
64 /* This is *not* reset after each function. It gives each CODE_LABEL
65 in the entire compilation a unique label number. */
/* NOTE(review): label_num is compilation-wide, not per-function (see the
   comment above); starts at 1 so 0 can mean "unset" elsewhere.  */
67 static int label_num
= 1;
69 /* Highest label number in current function.
70 Zero means use the value of label_num instead.
71 This is nonzero only when belatedly compiling an inline function. */
73 static int last_label_num
;
75 /* Value label_num had when set_new_first_and_last_label_number was called.
76 If label_num has not changed since then, last_label_num is valid. */
78 static int base_label_num
;
80 /* Nonzero means do not generate NOTEs for source line numbers. */
82 static int no_line_numbers
;
84 /* Commonly used rtx's, so that we only need space for one copy.
85 These are initialized once for the entire compilation.
86 All of these except perhaps the floating-point CONST_DOUBLEs
87 are unique; no other rtx-object will be equal to any of these. */
89 rtx global_rtl
[GR_MAX
];
91 /* We record floating-point CONST_DOUBLEs in each floating-point mode for
92 the values of 0, 1, and 2. For the integer entries and VOIDmode, we
93 record a copy of const[012]_rtx. */
95 rtx const_tiny_rtx
[3][(int) MAX_MACHINE_MODE
];
99 REAL_VALUE_TYPE dconst0
;
100 REAL_VALUE_TYPE dconst1
;
101 REAL_VALUE_TYPE dconst2
;
102 REAL_VALUE_TYPE dconstm1
;
104 /* All references to the following fixed hard registers go through
105 these unique rtl objects. On machines where the frame-pointer and
106 arg-pointer are the same register, they use the same unique object.
108 After register allocation, other rtl objects which used to be pseudo-regs
109 may be clobbered to refer to the frame-pointer register.
110 But references that were originally to the frame-pointer can be
111 distinguished from the others because they contain frame_pointer_rtx.
113 When to use frame_pointer_rtx and hard_frame_pointer_rtx is a little
114 tricky: until register elimination has taken place hard_frame_pointer_rtx
115 should be used if it is being set, and frame_pointer_rtx otherwise. After
116 register elimination hard_frame_pointer_rtx should always be used.
117 On machines where the two registers are same (most) then these are the
120 In an inline procedure, the stack and frame pointer rtxs may not be
121 used for anything else. */
122 rtx struct_value_rtx
; /* (REG:Pmode STRUCT_VALUE_REGNUM) */
123 rtx struct_value_incoming_rtx
; /* (REG:Pmode STRUCT_VALUE_INCOMING_REGNUM) */
124 rtx static_chain_rtx
; /* (REG:Pmode STATIC_CHAIN_REGNUM) */
125 rtx static_chain_incoming_rtx
; /* (REG:Pmode STATIC_CHAIN_INCOMING_REGNUM) */
126 rtx pic_offset_table_rtx
; /* (REG:Pmode PIC_OFFSET_TABLE_REGNUM) */
128 /* This is used to implement __builtin_return_address for some machines.
129 See for instance the MIPS port. */
130 rtx return_address_pointer_rtx
; /* (REG:Pmode RETURN_ADDRESS_POINTER_REGNUM) */
132 /* We make one copy of (const_int C) where C is in
133 [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
134 to save space during the compilation and simplify comparisons of
137 rtx const_int_rtx
[MAX_SAVED_CONST_INT
* 2 + 1];
139 /* start_sequence and gen_sequence can make a lot of rtx expressions which are
140 shortly thrown away. We use two mechanisms to prevent this waste:
142 For sizes up to 5 elements, we keep a SEQUENCE and its associated
143 rtvec for use by gen_sequence. One entry for each size is
144 sufficient because most cases are calls to gen_sequence followed by
145 immediately emitting the SEQUENCE. Reuse is safe since emitting a
146 sequence is destructive on the insn in it anyway and hence can't be
149 We do not bother to save this cached data over nested function calls.
150 Instead, we just reinitialize them. */
152 #define SEQUENCE_RESULT_SIZE 5
154 static rtx sequence_result
[SEQUENCE_RESULT_SIZE
];
156 /* During RTL generation, we also keep a list of free INSN rtl codes. */
157 static rtx free_insn
;
/* NOTE(review): the accessors below route per-function emit state through
   current_function->emit, so they are only valid while a function is being
   compiled.  */
159 #define first_insn (current_function->emit->x_first_insn)
160 #define last_insn (current_function->emit->x_last_insn)
161 #define cur_insn_uid (current_function->emit->x_cur_insn_uid)
162 #define last_linenum (current_function->emit->x_last_linenum)
163 #define last_filename (current_function->emit->x_last_filename)
164 #define first_label_num (current_function->emit->x_first_label_num)
166 /* This is where the pointer to the obstack being used for RTL is stored. */
167 extern struct obstack
*rtl_obstack
;
169 static rtx make_jump_insn_raw
PROTO((rtx
));
170 static rtx make_call_insn_raw
PROTO((rtx
));
171 static rtx find_line_note
PROTO((rtx
));
172 static void mark_sequence_stack
PROTO((struct sequence_stack
*));
174 /* There are some RTL codes that require special attention; the generation
175 functions do the raw handling. If you add to this list, modify
176 special_rtx in gengenrtl.c as well. */
/* NOTE(review): garbled fragment of gen_rtx_CONST_INT (return type, braces
   and some lines are missing from this extraction).  Visible logic: small
   values in [-MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT] are returned from
   the shared const_int_rtx cache; otherwise a raw CONST_INT is allocated.
   Code kept byte-identical -- do not edit; restore from upstream.  */
179 gen_rtx_CONST_INT (mode
, arg
)
180 enum machine_mode mode
;
183 if (arg
>= - MAX_SAVED_CONST_INT
&& arg
<= MAX_SAVED_CONST_INT
)
184 return const_int_rtx
[arg
+ MAX_SAVED_CONST_INT
];
/* NOTE(review): when STORE_FLAG_VALUE is outside the cached range, the
   shared const_true_rtx is reused for it.  */
186 #if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
187 if (const_true_rtx
&& arg
== STORE_FLAG_VALUE
)
188 return const_true_rtx
;
191 return gen_rtx_raw_CONST_INT (mode
, arg
);
/* NOTE(review): garbled fragment of gen_rtx_CONST_DOUBLE; large parts of the
   body (field stores, chain handling, return) are missing from this chunk.
   Visible logic: allocate a CONST_DOUBLE rtx, null its chain slot, then walk
   the remaining operand slots.  Kept byte-identical.  */
194 /* CONST_DOUBLEs needs special handling because its length is known
197 gen_rtx_CONST_DOUBLE (mode
, arg0
, arg1
, arg2
)
198 enum machine_mode mode
;
200 HOST_WIDE_INT arg1
, arg2
;
202 rtx r
= rtx_alloc (CONST_DOUBLE
);
207 X0EXP (r
, 1) = NULL_RTX
;
211 for (i
= GET_RTX_LENGTH (CONST_DOUBLE
) - 1; i
> 3; --i
)
/* NOTE(review): garbled fragment of gen_rtx_REG.  Visible contract: for
   Pmode references to the well-known hard registers (frame, hard-frame, arg,
   return-address, stack pointers) outside of reload, return the shared
   unique rtx objects; otherwise allocate a raw REG.  Kept byte-identical.  */
218 gen_rtx_REG (mode
, regno
)
219 enum machine_mode mode
;
222 /* In case the MD file explicitly references the frame pointer, have
223 all such references point to the same frame pointer. This is
224 used during frame pointer elimination to distinguish the explicit
225 references to these registers from pseudos that happened to be
228 If we have eliminated the frame pointer or arg pointer, we will
229 be using it as a normal register, for example as a spill
230 register. In such cases, we might be accessing it in a mode that
231 is not Pmode and therefore cannot use the pre-allocated rtx.
233 Also don't do this when we are making new REGs in reload, since
234 we don't want to get confused with the real pointers. */
236 if (mode
== Pmode
&& !reload_in_progress
)
238 if (regno
== FRAME_POINTER_REGNUM
)
239 return frame_pointer_rtx
;
240 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
241 if (regno
== HARD_FRAME_POINTER_REGNUM
)
242 return hard_frame_pointer_rtx
;
244 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && HARD_FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
245 if (regno
== ARG_POINTER_REGNUM
)
246 return arg_pointer_rtx
;
248 #ifdef RETURN_ADDRESS_POINTER_REGNUM
249 if (regno
== RETURN_ADDRESS_POINTER_REGNUM
)
250 return return_address_pointer_rtx
;
252 if (regno
== STACK_POINTER_REGNUM
)
253 return stack_pointer_rtx
;
256 return gen_rtx_raw_REG (mode
, regno
);
/* NOTE(review): garbled fragment of gen_rtx_MEM.  Visible logic: allocate a
   raw MEM and explicitly clear MEM_ALIAS_SET, which plain rtx allocation
   does not initialize.  Return statement is on a missing line.  Kept
   byte-identical.  */
260 gen_rtx_MEM (mode
, addr
)
261 enum machine_mode mode
;
264 rtx rt
= gen_rtx_raw_MEM (mode
, addr
);
266 /* This field is not cleared by the mere allocation of the rtx, so
268 MEM_ALIAS_SET (rt
) = 0;
/* NOTE(review): garbled fragment of the varargs gen_rtx dispatcher.  The
   switch heads, va_start/va_end, braces and the return are on missing lines.
   Visible structure: special-case CONST_INT / CONST_DOUBLE / REG / MEM via
   their dedicated generators, otherwise allocate by code and fill each
   operand slot according to the rtx format string.  Kept byte-identical.  */
273 /* rtx gen_rtx (code, mode, [element1, ..., elementn])
275 ** This routine generates an RTX of the size specified by
276 ** <code>, which is an RTX code. The RTX structure is initialized
277 ** from the arguments <element1> through <elementn>, which are
278 ** interpreted according to the specific RTX type's format. The
279 ** special machine mode associated with the rtx (if any) is specified
282 ** gen_rtx can be invoked in a way which resembles the lisp-like
283 ** rtx it will generate. For example, the following rtx structure:
285 ** (plus:QI (mem:QI (reg:SI 1))
286 ** (mem:QI (plusw:SI (reg:SI 2) (reg:SI 3))))
288 ** ...would be generated by the following C code:
290 ** gen_rtx (PLUS, QImode,
291 ** gen_rtx (MEM, QImode,
292 ** gen_rtx (REG, SImode, 1)),
293 ** gen_rtx (MEM, QImode,
294 ** gen_rtx (PLUS, SImode,
295 ** gen_rtx (REG, SImode, 2),
296 ** gen_rtx (REG, SImode, 3)))),
301 gen_rtx
VPROTO((enum rtx_code code
, enum machine_mode mode
, ...))
303 #ifndef ANSI_PROTOTYPES
305 enum machine_mode mode
;
308 register int i
; /* Array indices... */
309 register const char *fmt
; /* Current rtx's format... */
310 register rtx rt_val
; /* RTX to return to caller... */
/* NOTE(review): pre-ANSI path re-fetches code and mode from the va_list.  */
314 #ifndef ANSI_PROTOTYPES
315 code
= va_arg (p
, enum rtx_code
);
316 mode
= va_arg (p
, enum machine_mode
);
322 rt_val
= gen_rtx_CONST_INT (mode
, va_arg (p
, HOST_WIDE_INT
));
327 rtx arg0
= va_arg (p
, rtx
);
328 HOST_WIDE_INT arg1
= va_arg (p
, HOST_WIDE_INT
);
329 HOST_WIDE_INT arg2
= va_arg (p
, HOST_WIDE_INT
);
330 rt_val
= gen_rtx_CONST_DOUBLE (mode
, arg0
, arg1
, arg2
);
335 rt_val
= gen_rtx_REG (mode
, va_arg (p
, int));
339 rt_val
= gen_rtx_MEM (mode
, va_arg (p
, rtx
));
343 rt_val
= rtx_alloc (code
); /* Allocate the storage space. */
344 rt_val
->mode
= mode
; /* Store the machine mode... */
346 fmt
= GET_RTX_FORMAT (code
); /* Find the right format... */
347 for (i
= 0; i
< GET_RTX_LENGTH (code
); i
++)
351 case '0': /* Unused field. */
354 case 'i': /* An integer? */
355 XINT (rt_val
, i
) = va_arg (p
, int);
358 case 'w': /* A wide integer? */
359 XWINT (rt_val
, i
) = va_arg (p
, HOST_WIDE_INT
);
362 case 's': /* A string? */
363 XSTR (rt_val
, i
) = va_arg (p
, char *);
366 case 'e': /* An expression? */
367 case 'u': /* An insn? Same except when printing. */
368 XEXP (rt_val
, i
) = va_arg (p
, rtx
);
371 case 'E': /* An RTX vector? */
372 XVEC (rt_val
, i
) = va_arg (p
, rtvec
);
375 case 'b': /* A bitmap? */
376 XBITMAP (rt_val
, i
) = va_arg (p
, bitmap
);
379 case 't': /* A tree? */
380 XTREE (rt_val
, i
) = va_arg (p
, tree
);
/* NOTE(review): garbled fragment of varargs gen_rtvec.  Visible logic:
   return NULL_RTVEC for n == 0, otherwise copy the n rtx arguments into an
   alloca'd scratch array and delegate to gen_rtvec_v.  va_start/va_end and
   declarations are on missing lines.  Kept byte-identical.  */
394 /* gen_rtvec (n, [rt1, ..., rtn])
396 ** This routine creates an rtvec and stores within it the
397 ** pointers to rtx's which are its arguments.
402 gen_rtvec
VPROTO((int n
, ...))
404 #ifndef ANSI_PROTOTYPES
413 #ifndef ANSI_PROTOTYPES
418 return NULL_RTVEC
; /* Don't allocate an empty rtvec... */
420 vector
= (rtx
*) alloca (n
* sizeof (rtx
));
422 for (i
= 0; i
< n
; i
++)
423 vector
[i
] = va_arg (p
, rtx
);
426 return gen_rtvec_v (n
, vector
);
/* NOTE(review): garbled fragment of gen_rtvec_v.  Visible logic: empty
   input yields NULL_RTVEC; otherwise allocate an rtvec of n elements and
   copy from argp.  The return of rt_val is on a missing line.  Kept
   byte-identical.  */
430 gen_rtvec_v (n
, argp
)
435 register rtvec rt_val
;
438 return NULL_RTVEC
; /* Don't allocate an empty rtvec... */
440 rt_val
= rtvec_alloc (n
); /* Allocate an rtvec... */
442 for (i
= 0; i
< n
; i
++)
443 rt_val
->elem
[i
] = *argp
++;
/* NOTE(review): garbled fragment of gen_reg_rtx.  Visible logic: complex
   modes become a CONCAT of two part-mode pseudos; the regno_pointer_flag /
   regno_pointer_align / regno_reg_rtx tables are doubled (xrealloc + zeroed
   tail) when full; finally a raw REG with the next pseudo number is
   recorded and returned.  Declarations of new/new1/val and several braces
   are on missing lines.  Kept byte-identical.  */
449 /* Generate a REG rtx for a new pseudo register of mode MODE.
450 This pseudo is assigned the next sequential register number. */
454 enum machine_mode mode
;
456 struct function
*f
= current_function
;
459 /* Don't let anything called after initial flow analysis create new
464 if (GET_MODE_CLASS (mode
) == MODE_COMPLEX_FLOAT
465 || GET_MODE_CLASS (mode
) == MODE_COMPLEX_INT
)
467 /* For complex modes, don't make a single pseudo.
468 Instead, make a CONCAT of two pseudos.
469 This allows noncontiguous allocation of the real and imaginary parts,
470 which makes much better code. Besides, allocating DCmode
471 pseudos overstrains reload on some machines like the 386. */
472 rtx realpart
, imagpart
;
473 int size
= GET_MODE_UNIT_SIZE (mode
);
474 enum machine_mode partmode
475 = mode_for_size (size
* BITS_PER_UNIT
,
476 (GET_MODE_CLASS (mode
) == MODE_COMPLEX_FLOAT
477 ? MODE_FLOAT
: MODE_INT
),
480 realpart
= gen_reg_rtx (partmode
);
481 imagpart
= gen_reg_rtx (partmode
);
482 return gen_rtx_CONCAT (mode
, realpart
, imagpart
);
485 /* Make sure regno_pointer_flag and regno_reg_rtx are large
486 enough to have an element for this pseudo reg number. */
488 if (reg_rtx_no
== f
->emit
->regno_pointer_flag_length
)
490 int old_size
= f
->emit
->regno_pointer_flag_length
;
493 new = xrealloc (f
->emit
->regno_pointer_flag
, old_size
* 2);
494 memset (new + old_size
, 0, old_size
);
495 f
->emit
->regno_pointer_flag
= new;
497 new = xrealloc (f
->emit
->regno_pointer_align
, old_size
* 2);
498 memset (new + old_size
, 0, old_size
);
499 f
->emit
->regno_pointer_align
= new;
501 new1
= (rtx
*) xrealloc (f
->emit
->x_regno_reg_rtx
,
502 old_size
* 2 * sizeof (rtx
));
503 memset (new1
+ old_size
, 0, old_size
* sizeof (rtx
));
504 regno_reg_rtx
= new1
;
506 f
->emit
->regno_pointer_flag_length
= old_size
* 2;
509 val
= gen_rtx_raw_REG (mode
, reg_rtx_no
);
510 regno_reg_rtx
[reg_rtx_no
++] = val
;
/* NOTE(review): garbled fragment of mark_user_reg (name inferred from the
   comment; the signature line is missing -- confirm against upstream).
   Visible logic: sets REG_USERVAR_P on a REG, or on both halves of a
   CONCAT.  Kept byte-identical.  */
514 /* Identify REG (which may be a CONCAT) as a user register. */
520 if (GET_CODE (reg
) == CONCAT
)
522 REG_USERVAR_P (XEXP (reg
, 0)) = 1;
523 REG_USERVAR_P (XEXP (reg
, 1)) = 1;
525 else if (GET_CODE (reg
) == REG
)
526 REG_USERVAR_P (reg
) = 1;
/* NOTE(review): garbled fragment of mark_reg_pointer.  Visible logic:
   first marking records the alignment; a later marking with a smaller
   nonzero alignment lowers the recorded alignment (we can no longer assume
   the larger one).  Parameter declarations and braces are on missing
   lines.  Kept byte-identical.  */
531 /* Identify REG as a probable pointer register and show its alignment
532 as ALIGN, if nonzero. */
535 mark_reg_pointer (reg
, align
)
539 if (! REGNO_POINTER_FLAG (REGNO (reg
)))
541 REGNO_POINTER_FLAG (REGNO (reg
)) = 1;
544 REGNO_POINTER_ALIGN (REGNO (reg
)) = align
;
546 else if (align
&& align
< REGNO_POINTER_ALIGN (REGNO (reg
)))
547 /* We can no-longer be sure just how aligned this pointer is */
548 REGNO_POINTER_ALIGN (REGNO (reg
)) = align
;
/* NOTE(review): garbled fragments of max_reg_num (comment only survives),
   max_label_num, and get_first_label_num.  max_label_num returns the cached
   last_label_num only while label_num has not advanced past base_label_num;
   its fallback return is on a missing line.  Kept byte-identical.  */
551 /* Return 1 plus largest pseudo reg number used in the current function. */
559 /* Return 1 + the largest label number used so far in the current function. */
564 if (last_label_num
&& label_num
== base_label_num
)
565 return last_label_num
;
569 /* Return first label number used in this function (if any were used). */
572 get_first_label_num ()
574 return first_label_num
;
/* NOTE(review): garbled fragment of gen_lowpart_common, the shared helper
   behind gen_lowpart and the cse.c/combine.c variants.  Many lines
   (declarations of word/i/r/lowpart/highpart, braces, several returns,
   #else/#endif lines) are missing from this extraction; the visible cases
   are: identity mode, narrowing of ZERO_/SIGN_EXTEND, SUBREG folding, REG
   (with hard-reg validity and word-offset adjustment), integer constants
   (CONST_INT/CONST_DOUBLE bit extraction with sign extension), int<->float
   bit reinterpretation under matching host/target float formats, and
   float->int via operand_subword.  Returns 0 for unhandled cases.  Kept
   byte-identical; restore from upstream before changing anything.  */
577 /* Return a value representing some low-order bits of X, where the number
578 of low-order bits is given by MODE. Note that no conversion is done
579 between floating-point and fixed-point values, rather, the bit
580 representation is returned.
582 This function handles the cases in common between gen_lowpart, below,
583 and two variants in cse.c and combine.c. These are the cases that can
584 be safely handled at all points in the compilation.
586 If this is not a case we can handle, return 0. */
589 gen_lowpart_common (mode
, x
)
590 enum machine_mode mode
;
595 if (GET_MODE (x
) == mode
)
598 /* MODE must occupy no more words than the mode of X. */
599 if (GET_MODE (x
) != VOIDmode
600 && ((GET_MODE_SIZE (mode
) + (UNITS_PER_WORD
- 1)) / UNITS_PER_WORD
601 > ((GET_MODE_SIZE (GET_MODE (x
)) + (UNITS_PER_WORD
- 1))
605 if (WORDS_BIG_ENDIAN
&& GET_MODE_SIZE (GET_MODE (x
)) > UNITS_PER_WORD
)
606 word
= ((GET_MODE_SIZE (GET_MODE (x
))
607 - MAX (GET_MODE_SIZE (mode
), UNITS_PER_WORD
))
610 if ((GET_CODE (x
) == ZERO_EXTEND
|| GET_CODE (x
) == SIGN_EXTEND
)
611 && (GET_MODE_CLASS (mode
) == MODE_INT
612 || GET_MODE_CLASS (mode
) == MODE_PARTIAL_INT
))
614 /* If we are getting the low-order part of something that has been
615 sign- or zero-extended, we can either just use the object being
616 extended or make a narrower extension. If we want an even smaller
617 piece than the size of the object being extended, call ourselves
620 This case is used mostly by combine and cse. */
622 if (GET_MODE (XEXP (x
, 0)) == mode
)
624 else if (GET_MODE_SIZE (mode
) < GET_MODE_SIZE (GET_MODE (XEXP (x
, 0))))
625 return gen_lowpart_common (mode
, XEXP (x
, 0));
626 else if (GET_MODE_SIZE (mode
) < GET_MODE_SIZE (GET_MODE (x
)))
627 return gen_rtx_fmt_e (GET_CODE (x
), mode
, XEXP (x
, 0));
629 else if (GET_CODE (x
) == SUBREG
630 && (GET_MODE_SIZE (mode
) <= UNITS_PER_WORD
631 || GET_MODE_SIZE (mode
) == GET_MODE_UNIT_SIZE (GET_MODE (x
))))
632 return (GET_MODE (SUBREG_REG (x
)) == mode
&& SUBREG_WORD (x
) == 0
634 : gen_rtx_SUBREG (mode
, SUBREG_REG (x
), SUBREG_WORD (x
) + word
));
635 else if (GET_CODE (x
) == REG
)
637 /* Let the backend decide how many registers to skip. This is needed
638 in particular for Sparc64 where fp regs are smaller than a word. */
639 /* ??? Note that subregs are now ambiguous, in that those against
640 pseudos are sized by the Word Size, while those against hard
641 regs are sized by the underlying register size. Better would be
642 to always interpret the subreg offset parameter as bytes or bits. */
644 if (WORDS_BIG_ENDIAN
&& REGNO (x
) < FIRST_PSEUDO_REGISTER
)
645 word
= (HARD_REGNO_NREGS (REGNO (x
), GET_MODE (x
))
646 - HARD_REGNO_NREGS (REGNO (x
), mode
));
648 /* If the register is not valid for MODE, return 0. If we don't
649 do this, there is no way to fix up the resulting REG later.
650 But we do do this if the current REG is not valid for its
651 mode. This latter is a kludge, but is required due to the
652 way that parameters are passed on some machines, most
654 if (REGNO (x
) < FIRST_PSEUDO_REGISTER
655 && ! HARD_REGNO_MODE_OK (REGNO (x
) + word
, mode
)
656 && HARD_REGNO_MODE_OK (REGNO (x
), GET_MODE (x
)))
658 else if (REGNO (x
) < FIRST_PSEUDO_REGISTER
659 /* integrate.c can't handle parts of a return value register. */
660 && (! REG_FUNCTION_VALUE_P (x
)
661 || ! rtx_equal_function_value_matters
)
662 #ifdef CLASS_CANNOT_CHANGE_SIZE
663 && ! (GET_MODE_SIZE (mode
) != GET_MODE_SIZE (GET_MODE (x
))
664 && GET_MODE_CLASS (GET_MODE (x
)) != MODE_COMPLEX_INT
665 && GET_MODE_CLASS (GET_MODE (x
)) != MODE_COMPLEX_FLOAT
666 && (TEST_HARD_REG_BIT
667 (reg_class_contents
[(int) CLASS_CANNOT_CHANGE_SIZE
],
670 /* We want to keep the stack, frame, and arg pointers
672 && x
!= frame_pointer_rtx
673 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
674 && x
!= arg_pointer_rtx
676 && x
!= stack_pointer_rtx
)
677 return gen_rtx_REG (mode
, REGNO (x
) + word
);
679 return gen_rtx_SUBREG (mode
, x
, word
);
681 /* If X is a CONST_INT or a CONST_DOUBLE, extract the appropriate bits
682 from the low-order part of the constant. */
683 else if ((GET_MODE_CLASS (mode
) == MODE_INT
684 || GET_MODE_CLASS (mode
) == MODE_PARTIAL_INT
)
685 && GET_MODE (x
) == VOIDmode
686 && (GET_CODE (x
) == CONST_INT
|| GET_CODE (x
) == CONST_DOUBLE
))
688 /* If MODE is twice the host word size, X is already the desired
689 representation. Otherwise, if MODE is wider than a word, we can't
690 do this. If MODE is exactly a word, return just one CONST_INT.
691 If MODE is smaller than a word, clear the bits that don't belong
692 in our mode, unless they and our sign bit are all one. So we get
693 either a reasonable negative value or a reasonable unsigned value
696 if (GET_MODE_BITSIZE (mode
) >= 2 * HOST_BITS_PER_WIDE_INT
)
698 else if (GET_MODE_BITSIZE (mode
) > HOST_BITS_PER_WIDE_INT
)
700 else if (GET_MODE_BITSIZE (mode
) == HOST_BITS_PER_WIDE_INT
)
701 return (GET_CODE (x
) == CONST_INT
? x
702 : GEN_INT (CONST_DOUBLE_LOW (x
)));
705 /* MODE must be narrower than HOST_BITS_PER_WIDE_INT. */
706 int width
= GET_MODE_BITSIZE (mode
);
707 HOST_WIDE_INT val
= (GET_CODE (x
) == CONST_INT
? INTVAL (x
)
708 : CONST_DOUBLE_LOW (x
));
710 /* Sign extend to HOST_WIDE_INT. */
711 val
= val
<< (HOST_BITS_PER_WIDE_INT
- width
) >> (HOST_BITS_PER_WIDE_INT
- width
);
713 return (GET_CODE (x
) == CONST_INT
&& INTVAL (x
) == val
? x
718 /* If X is an integral constant but we want it in floating-point, it
719 must be the case that we have a union of an integer and a floating-point
720 value. If the machine-parameters allow it, simulate that union here
721 and return the result. The two-word and single-word cases are
724 else if (((HOST_FLOAT_FORMAT
== TARGET_FLOAT_FORMAT
725 && HOST_BITS_PER_WIDE_INT
== BITS_PER_WORD
)
726 || flag_pretend_float
)
727 && GET_MODE_CLASS (mode
) == MODE_FLOAT
728 && GET_MODE_SIZE (mode
) == UNITS_PER_WORD
729 && GET_CODE (x
) == CONST_INT
730 && sizeof (float) * HOST_BITS_PER_CHAR
== HOST_BITS_PER_WIDE_INT
)
731 #ifdef REAL_ARITHMETIC
737 r
= REAL_VALUE_FROM_TARGET_SINGLE (i
);
738 return CONST_DOUBLE_FROM_REAL_VALUE (r
, mode
);
742 union {HOST_WIDE_INT i
; float d
; } u
;
745 return CONST_DOUBLE_FROM_REAL_VALUE (u
.d
, mode
);
748 else if (((HOST_FLOAT_FORMAT
== TARGET_FLOAT_FORMAT
749 && HOST_BITS_PER_WIDE_INT
== BITS_PER_WORD
)
750 || flag_pretend_float
)
751 && GET_MODE_CLASS (mode
) == MODE_FLOAT
752 && GET_MODE_SIZE (mode
) == 2 * UNITS_PER_WORD
753 && (GET_CODE (x
) == CONST_INT
|| GET_CODE (x
) == CONST_DOUBLE
)
754 && GET_MODE (x
) == VOIDmode
755 && (sizeof (double) * HOST_BITS_PER_CHAR
756 == 2 * HOST_BITS_PER_WIDE_INT
))
757 #ifdef REAL_ARITHMETIC
761 HOST_WIDE_INT low
, high
;
763 if (GET_CODE (x
) == CONST_INT
)
764 low
= INTVAL (x
), high
= low
>> (HOST_BITS_PER_WIDE_INT
-1);
766 low
= CONST_DOUBLE_LOW (x
), high
= CONST_DOUBLE_HIGH (x
);
768 /* REAL_VALUE_TARGET_DOUBLE takes the addressing order of the
770 if (WORDS_BIG_ENDIAN
)
771 i
[0] = high
, i
[1] = low
;
773 i
[0] = low
, i
[1] = high
;
775 r
= REAL_VALUE_FROM_TARGET_DOUBLE (i
);
776 return CONST_DOUBLE_FROM_REAL_VALUE (r
, mode
);
780 union {HOST_WIDE_INT i
[2]; double d
; } u
;
781 HOST_WIDE_INT low
, high
;
783 if (GET_CODE (x
) == CONST_INT
)
784 low
= INTVAL (x
), high
= low
>> (HOST_BITS_PER_WIDE_INT
-1);
786 low
= CONST_DOUBLE_LOW (x
), high
= CONST_DOUBLE_HIGH (x
);
788 #ifdef HOST_WORDS_BIG_ENDIAN
789 u
.i
[0] = high
, u
.i
[1] = low
;
791 u
.i
[0] = low
, u
.i
[1] = high
;
794 return CONST_DOUBLE_FROM_REAL_VALUE (u
.d
, mode
);
798 /* We need an extra case for machines where HOST_BITS_PER_WIDE_INT is the
799 same as sizeof (double) or when sizeof (float) is larger than the
800 size of a word on the target machine. */
801 #ifdef REAL_ARITHMETIC
802 else if (mode
== SFmode
&& GET_CODE (x
) == CONST_INT
)
808 r
= REAL_VALUE_FROM_TARGET_SINGLE (i
);
809 return CONST_DOUBLE_FROM_REAL_VALUE (r
, mode
);
811 else if (((HOST_FLOAT_FORMAT
== TARGET_FLOAT_FORMAT
812 && HOST_BITS_PER_WIDE_INT
== BITS_PER_WORD
)
813 || flag_pretend_float
)
814 && GET_MODE_CLASS (mode
) == MODE_FLOAT
815 && GET_MODE_SIZE (mode
) == UNITS_PER_WORD
816 && GET_CODE (x
) == CONST_INT
817 && (sizeof (double) * HOST_BITS_PER_CHAR
818 == HOST_BITS_PER_WIDE_INT
))
824 r
= REAL_VALUE_FROM_TARGET_DOUBLE (&i
);
825 return CONST_DOUBLE_FROM_REAL_VALUE (r
, mode
);
829 /* Similarly, if this is converting a floating-point value into a
830 single-word integer. Only do this is the host and target parameters are
833 else if (((HOST_FLOAT_FORMAT
== TARGET_FLOAT_FORMAT
834 && HOST_BITS_PER_WIDE_INT
== BITS_PER_WORD
)
835 || flag_pretend_float
)
836 && (GET_MODE_CLASS (mode
) == MODE_INT
837 || GET_MODE_CLASS (mode
) == MODE_PARTIAL_INT
)
838 && GET_CODE (x
) == CONST_DOUBLE
839 && GET_MODE_CLASS (GET_MODE (x
)) == MODE_FLOAT
840 && GET_MODE_BITSIZE (mode
) == BITS_PER_WORD
)
841 return operand_subword (x
, word
, 0, GET_MODE (x
));
843 /* Similarly, if this is converting a floating-point value into a
844 two-word integer, we can do this one word at a time and make an
845 integer. Only do this is the host and target parameters are
848 else if (((HOST_FLOAT_FORMAT
== TARGET_FLOAT_FORMAT
849 && HOST_BITS_PER_WIDE_INT
== BITS_PER_WORD
)
850 || flag_pretend_float
)
851 && (GET_MODE_CLASS (mode
) == MODE_INT
852 || GET_MODE_CLASS (mode
) == MODE_PARTIAL_INT
)
853 && GET_CODE (x
) == CONST_DOUBLE
854 && GET_MODE_CLASS (GET_MODE (x
)) == MODE_FLOAT
855 && GET_MODE_BITSIZE (mode
) == 2 * BITS_PER_WORD
)
858 = operand_subword (x
, word
+ WORDS_BIG_ENDIAN
, 0, GET_MODE (x
));
860 = operand_subword (x
, word
+ ! WORDS_BIG_ENDIAN
, 0, GET_MODE (x
));
862 if (lowpart
&& GET_CODE (lowpart
) == CONST_INT
863 && highpart
&& GET_CODE (highpart
) == CONST_INT
)
864 return immed_double_const (INTVAL (lowpart
), INTVAL (highpart
), mode
);
867 /* Otherwise, we can't do this. */
/* NOTE(review): garbled fragment of gen_realpart.  Visible logic: a CONCAT
   yields its first operand (return on a missing line); a narrow-mode hard
   register on WORDS_BIG_ENDIAN targets is a fatal error; otherwise the real
   part is the highpart (big-endian words) or lowpart.  Kept byte-identical.  */
871 /* Return the real part (which has mode MODE) of a complex value X.
872 This always comes at the low address in memory. */
875 gen_realpart (mode
, x
)
876 enum machine_mode mode
;
879 if (GET_CODE (x
) == CONCAT
&& GET_MODE (XEXP (x
, 0)) == mode
)
881 else if (WORDS_BIG_ENDIAN
882 && GET_MODE_BITSIZE (mode
) < BITS_PER_WORD
884 && REGNO (x
) < FIRST_PSEUDO_REGISTER
)
885 fatal ("Unable to access real part of complex value in a hard register on this target");
886 else if (WORDS_BIG_ENDIAN
)
887 return gen_highpart (mode
, x
);
889 return gen_lowpart (mode
, x
);
/* NOTE(review): garbled fragment of gen_imagpart -- mirror image of
   gen_realpart: CONCAT yields its second operand (return on a missing
   line); big-endian words take the lowpart, little-endian the highpart;
   narrow-mode hard regs are fatal.  Kept byte-identical.  */
892 /* Return the imaginary part (which has mode MODE) of a complex value X.
893 This always comes at the high address in memory. */
896 gen_imagpart (mode
, x
)
897 enum machine_mode mode
;
900 if (GET_CODE (x
) == CONCAT
&& GET_MODE (XEXP (x
, 0)) == mode
)
902 else if (WORDS_BIG_ENDIAN
)
903 return gen_lowpart (mode
, x
);
904 else if (!WORDS_BIG_ENDIAN
905 && GET_MODE_BITSIZE (mode
) < BITS_PER_WORD
907 && REGNO (x
) < FIRST_PSEUDO_REGISTER
)
908 fatal ("Unable to access imaginary part of complex value in a hard register on this target");
910 return gen_highpart (mode
, x
);
/* NOTE(review): garbled fragment of subreg_realpart_p.  Visible logic:
   abort path for non-SUBREG (the abort call is on a missing line), then a
   word-offset test -- the real part always occupies the first unit
   regardless of WORDS_BIG_ENDIAN.  Kept byte-identical.  */
913 /* Return 1 iff X, assumed to be a SUBREG,
914 refers to the real part of the complex value in its containing reg.
915 Complex values are always stored with the real part in the first word,
916 regardless of WORDS_BIG_ENDIAN. */
919 subreg_realpart_p (x
)
922 if (GET_CODE (x
) != SUBREG
)
925 return SUBREG_WORD (x
) * UNITS_PER_WORD
< GET_MODE_UNIT_SIZE (GET_MODE (SUBREG_REG (x
)));
/* NOTE(review): garbled fragment of gen_lowpart.  Visible logic: delegate
   to gen_lowpart_common; on failure retry for REG via copy_to_reg (hard reg
   invalid in MODE), handle MEM by offset-adjusted change_address (with
   WORDS/BYTES_BIG_ENDIAN corrections), and ADDRESSOF via force_reg.  The
   success return and final abort are on missing lines.  Kept
   byte-identical.  */
928 /* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a value,
929 return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
930 least-significant part of X.
931 MODE specifies how big a part of X to return;
932 it usually should not be larger than a word.
933 If X is a MEM whose address is a QUEUED, the value may be so also. */
936 gen_lowpart (mode
, x
)
937 enum machine_mode mode
;
940 rtx result
= gen_lowpart_common (mode
, x
);
944 else if (GET_CODE (x
) == REG
)
946 /* Must be a hard reg that's not valid in MODE. */
947 result
= gen_lowpart_common (mode
, copy_to_reg (x
));
952 else if (GET_CODE (x
) == MEM
)
954 /* The only additional case we can do is MEM. */
955 register int offset
= 0;
956 if (WORDS_BIG_ENDIAN
)
957 offset
= (MAX (GET_MODE_SIZE (GET_MODE (x
)), UNITS_PER_WORD
)
958 - MAX (GET_MODE_SIZE (mode
), UNITS_PER_WORD
));
960 if (BYTES_BIG_ENDIAN
)
961 /* Adjust the address so that the address-after-the-data
963 offset
-= (MIN (UNITS_PER_WORD
, GET_MODE_SIZE (mode
))
964 - MIN (UNITS_PER_WORD
, GET_MODE_SIZE (GET_MODE (x
))));
966 return change_address (x
, mode
, plus_constant (XEXP (x
, 0), offset
));
968 else if (GET_CODE (x
) == ADDRESSOF
)
969 return gen_lowpart (mode
, force_reg (GET_MODE (x
), x
));
/* NOTE(review): garbled fragment of gen_highpart (the most-significant-part
   counterpart of gen_lowpart, used for the imaginary half of complex
   values).  Missing lines include the abort calls, the word declaration,
   several returns and #endif lines.  Visible cases: CONST_DOUBLE high word,
   CONST_INT sign bits, MEM with endian-adjusted offset, zero-word SUBREG
   recursion, and REG with hard/pseudo word-skip computation mirroring
   gen_lowpart_common.  Kept byte-identical.  */
974 /* Like `gen_lowpart', but refer to the most significant part.
975 This is used to access the imaginary part of a complex number. */
978 gen_highpart (mode
, x
)
979 enum machine_mode mode
;
982 /* This case loses if X is a subreg. To catch bugs early,
983 complain if an invalid MODE is used even in other cases. */
984 if (GET_MODE_SIZE (mode
) > UNITS_PER_WORD
985 && GET_MODE_SIZE (mode
) != GET_MODE_UNIT_SIZE (GET_MODE (x
)))
987 if (GET_CODE (x
) == CONST_DOUBLE
988 #if !(TARGET_FLOAT_FORMAT != HOST_FLOAT_FORMAT || defined (REAL_IS_NOT_DOUBLE))
989 && GET_MODE_CLASS (GET_MODE (x
)) != MODE_FLOAT
992 return GEN_INT (CONST_DOUBLE_HIGH (x
) & GET_MODE_MASK (mode
));
993 else if (GET_CODE (x
) == CONST_INT
)
995 if (HOST_BITS_PER_WIDE_INT
<= BITS_PER_WORD
)
997 return GEN_INT (INTVAL (x
) >> (HOST_BITS_PER_WIDE_INT
- BITS_PER_WORD
));
999 else if (GET_CODE (x
) == MEM
)
1001 register int offset
= 0;
1002 if (! WORDS_BIG_ENDIAN
)
1003 offset
= (MAX (GET_MODE_SIZE (GET_MODE (x
)), UNITS_PER_WORD
)
1004 - MAX (GET_MODE_SIZE (mode
), UNITS_PER_WORD
));
1006 if (! BYTES_BIG_ENDIAN
1007 && GET_MODE_SIZE (mode
) < UNITS_PER_WORD
)
1008 offset
-= (GET_MODE_SIZE (mode
)
1009 - MIN (UNITS_PER_WORD
,
1010 GET_MODE_SIZE (GET_MODE (x
))));
1012 return change_address (x
, mode
, plus_constant (XEXP (x
, 0), offset
));
1014 else if (GET_CODE (x
) == SUBREG
)
1016 /* The only time this should occur is when we are looking at a
1017 multi-word item with a SUBREG whose mode is the same as that of the
1018 item. It isn't clear what we would do if it wasn't. */
1019 if (SUBREG_WORD (x
) != 0)
1021 return gen_highpart (mode
, SUBREG_REG (x
));
1023 else if (GET_CODE (x
) == REG
)
1027 /* Let the backend decide how many registers to skip. This is needed
1028 in particular for sparc64 where fp regs are smaller than a word. */
1029 /* ??? Note that subregs are now ambiguous, in that those against
1030 pseudos are sized by the word size, while those against hard
1031 regs are sized by the underlying register size. Better would be
1032 to always interpret the subreg offset parameter as bytes or bits. */
1034 if (WORDS_BIG_ENDIAN
)
1036 else if (REGNO (x
) < FIRST_PSEUDO_REGISTER
)
1037 word
= (HARD_REGNO_NREGS (REGNO (x
), GET_MODE (x
))
1038 - HARD_REGNO_NREGS (REGNO (x
), mode
));
1040 word
= ((GET_MODE_SIZE (GET_MODE (x
))
1041 - MAX (GET_MODE_SIZE (mode
), UNITS_PER_WORD
))
1044 if (REGNO (x
) < FIRST_PSEUDO_REGISTER
1045 /* integrate.c can't handle parts of a return value register. */
1046 && (! REG_FUNCTION_VALUE_P (x
)
1047 || ! rtx_equal_function_value_matters
)
1048 /* We want to keep the stack, frame, and arg pointers special. */
1049 && x
!= frame_pointer_rtx
1050 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1051 && x
!= arg_pointer_rtx
1053 && x
!= stack_pointer_rtx
)
1054 return gen_rtx_REG (mode
, REGNO (x
) + word
);
1056 return gen_rtx_SUBREG (mode
, x
, word
);
/* NOTE(review): garbled fragment of subreg_lowpart_p.  Visible logic:
   non-SUBREGs are trivially their own low part (that return is on a missing
   line); for big-endian words on a multi-word inner reg the low part sits
   at a computed nonzero word offset, otherwise at word 0.  Kept
   byte-identical.  */
1062 /* Return 1 iff X, assumed to be a SUBREG,
1063 refers to the least significant part of its containing reg.
1064 If X is not a SUBREG, always return 1 (it is its own low part!). */
1067 subreg_lowpart_p (x
)
1070 if (GET_CODE (x
) != SUBREG
)
1072 else if (GET_MODE (SUBREG_REG (x
)) == VOIDmode
)
1075 if (WORDS_BIG_ENDIAN
1076 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
))) > UNITS_PER_WORD
)
1077 return (SUBREG_WORD (x
)
1078 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
)))
1079 - MAX (GET_MODE_SIZE (GET_MODE (x
)), UNITS_PER_WORD
))
1082 return SUBREG_WORD (x
) == 0;
1085 /* Return subword I of operand OP.
1086 The word number, I, is interpreted as the word number starting at the
1087 low-order address. Word 0 is the low-order word if not WORDS_BIG_ENDIAN,
1088 otherwise it is the high-order word.
1090 If we cannot extract the required word, we return zero. Otherwise, an
1091 rtx corresponding to the requested word will be returned.
1093 VALIDATE_ADDRESS is nonzero if the address should be validated. Before
1094 reload has completed, a valid address will always be returned. After
1095 reload, if a valid address cannot be returned, we return zero.
1097 If VALIDATE_ADDRESS is zero, we simply form the required address; validating
1098 it is the responsibility of the caller.
1100 MODE is the mode of OP in case it is a CONST_INT. */
1103 operand_subword (op
, i
, validate_address
, mode
)
1106 int validate_address
;
1107 enum machine_mode mode
;
1110 int size_ratio
= HOST_BITS_PER_WIDE_INT
/ BITS_PER_WORD
;
1112 if (mode
== VOIDmode
)
1113 mode
= GET_MODE (op
);
1115 if (mode
== VOIDmode
)
1118 /* If OP is narrower than a word, fail. */
1120 && (GET_MODE_SIZE (mode
) < UNITS_PER_WORD
))
1123 /* If we want a word outside OP, return zero. */
1125 && (i
+ 1) * UNITS_PER_WORD
> GET_MODE_SIZE (mode
))
1128 /* If OP is already an integer word, return it. */
1129 if (GET_MODE_CLASS (mode
) == MODE_INT
1130 && GET_MODE_SIZE (mode
) == UNITS_PER_WORD
)
1133 /* If OP is a REG or SUBREG, we can handle it very simply. */
1134 if (GET_CODE (op
) == REG
)
1136 /* ??? There is a potential problem with this code. It does not
1137 properly handle extractions of a subword from a hard register
1138 that is larger than word_mode. Presumably the check for
1139 HARD_REGNO_MODE_OK catches these most of these cases. */
1141 /* If OP is a hard register, but OP + I is not a hard register,
1142 then extracting a subword is impossible.
1144 For example, consider if OP is the last hard register and it is
1145 larger than word_mode. If we wanted word N (for N > 0) because a
1146 part of that hard register was known to contain a useful value,
1147 then OP + I would refer to a pseudo, not the hard register we
1149 if (REGNO (op
) < FIRST_PSEUDO_REGISTER
1150 && REGNO (op
) + i
>= FIRST_PSEUDO_REGISTER
)
1153 /* If the register is not valid for MODE, return 0. Note we
1154 have to check both OP and OP + I since they may refer to
1155 different parts of the register file.
1157 Consider if OP refers to the last 96bit FP register and we want
1158 subword 3 because that subword is known to contain a value we
1160 if (REGNO (op
) < FIRST_PSEUDO_REGISTER
1161 && (! HARD_REGNO_MODE_OK (REGNO (op
), word_mode
)
1162 || ! HARD_REGNO_MODE_OK (REGNO (op
) + i
, word_mode
)))
1164 else if (REGNO (op
) >= FIRST_PSEUDO_REGISTER
1165 || (REG_FUNCTION_VALUE_P (op
)
1166 && rtx_equal_function_value_matters
)
1167 /* We want to keep the stack, frame, and arg pointers
1169 || op
== frame_pointer_rtx
1170 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1171 || op
== arg_pointer_rtx
1173 || op
== stack_pointer_rtx
)
1174 return gen_rtx_SUBREG (word_mode
, op
, i
);
1176 return gen_rtx_REG (word_mode
, REGNO (op
) + i
);
1178 else if (GET_CODE (op
) == SUBREG
)
1179 return gen_rtx_SUBREG (word_mode
, SUBREG_REG (op
), i
+ SUBREG_WORD (op
));
1180 else if (GET_CODE (op
) == CONCAT
)
1182 int partwords
= GET_MODE_UNIT_SIZE (GET_MODE (op
)) / UNITS_PER_WORD
;
1184 return operand_subword (XEXP (op
, 0), i
, validate_address
, mode
);
1185 return operand_subword (XEXP (op
, 1), i
- partwords
,
1186 validate_address
, mode
);
1189 /* Form a new MEM at the requested address. */
1190 if (GET_CODE (op
) == MEM
)
1192 rtx addr
= plus_constant (XEXP (op
, 0), i
* UNITS_PER_WORD
);
1195 if (validate_address
)
1197 if (reload_completed
)
1199 if (! strict_memory_address_p (word_mode
, addr
))
1203 addr
= memory_address (word_mode
, addr
);
1206 new = gen_rtx_MEM (word_mode
, addr
);
1208 MEM_COPY_ATTRIBUTES (new, op
);
1209 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (op
);
1210 MEM_ALIAS_SET (new) = MEM_ALIAS_SET (op
);
1215 /* The only remaining cases are when OP is a constant. If the host and
1216 target floating formats are the same, handling two-word floating
1217 constants are easy. Note that REAL_VALUE_TO_TARGET_{SINGLE,DOUBLE}
1218 are defined as returning one or two 32 bit values, respectively,
1219 and not values of BITS_PER_WORD bits. */
1220 #ifdef REAL_ARITHMETIC
1221 /* The output is some bits, the width of the target machine's word.
1222 A wider-word host can surely hold them in a CONST_INT. A narrower-word
1224 if (HOST_BITS_PER_WIDE_INT
>= BITS_PER_WORD
1225 && GET_MODE_CLASS (mode
) == MODE_FLOAT
1226 && GET_MODE_BITSIZE (mode
) == 64
1227 && GET_CODE (op
) == CONST_DOUBLE
)
1232 REAL_VALUE_FROM_CONST_DOUBLE (rv
, op
);
1233 REAL_VALUE_TO_TARGET_DOUBLE (rv
, k
);
1235 /* We handle 32-bit and >= 64-bit words here. Note that the order in
1236 which the words are written depends on the word endianness.
1237 ??? This is a potential portability problem and should
1238 be fixed at some point.
1240 We must excercise caution with the sign bit. By definition there
1241 are 32 significant bits in K; there may be more in a HOST_WIDE_INT.
1242 Consider a host with a 32-bit long and a 64-bit HOST_WIDE_INT.
1243 So we explicitly mask and sign-extend as necessary. */
1244 if (BITS_PER_WORD
== 32)
1247 val
= ((val
& 0xffffffff) ^ 0x80000000) - 0x80000000;
1248 return GEN_INT (val
);
1250 #if HOST_BITS_PER_WIDE_INT >= 64
1251 else if (BITS_PER_WORD
>= 64 && i
== 0)
1253 val
= k
[! WORDS_BIG_ENDIAN
];
1254 val
= (((val
& 0xffffffff) ^ 0x80000000) - 0x80000000) << 32;
1255 val
|= (HOST_WIDE_INT
) k
[WORDS_BIG_ENDIAN
] & 0xffffffff;
1256 return GEN_INT (val
);
1259 else if (BITS_PER_WORD
== 16)
1262 if ((i
& 1) == !WORDS_BIG_ENDIAN
)
1265 return GEN_INT (val
);
1270 else if (HOST_BITS_PER_WIDE_INT
>= BITS_PER_WORD
1271 && GET_MODE_CLASS (mode
) == MODE_FLOAT
1272 && GET_MODE_BITSIZE (mode
) > 64
1273 && GET_CODE (op
) == CONST_DOUBLE
)
1278 REAL_VALUE_FROM_CONST_DOUBLE (rv
, op
);
1279 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv
, k
);
1281 if (BITS_PER_WORD
== 32)
1284 val
= ((val
& 0xffffffff) ^ 0x80000000) - 0x80000000;
1285 return GEN_INT (val
);
1290 #else /* no REAL_ARITHMETIC */
1291 if (((HOST_FLOAT_FORMAT
== TARGET_FLOAT_FORMAT
1292 && HOST_BITS_PER_WIDE_INT
== BITS_PER_WORD
)
1293 || flag_pretend_float
)
1294 && GET_MODE_CLASS (mode
) == MODE_FLOAT
1295 && GET_MODE_SIZE (mode
) == 2 * UNITS_PER_WORD
1296 && GET_CODE (op
) == CONST_DOUBLE
)
1298 /* The constant is stored in the host's word-ordering,
1299 but we want to access it in the target's word-ordering. Some
1300 compilers don't like a conditional inside macro args, so we have two
1301 copies of the return. */
1302 #ifdef HOST_WORDS_BIG_ENDIAN
1303 return GEN_INT (i
== WORDS_BIG_ENDIAN
1304 ? CONST_DOUBLE_HIGH (op
) : CONST_DOUBLE_LOW (op
));
1306 return GEN_INT (i
!= WORDS_BIG_ENDIAN
1307 ? CONST_DOUBLE_HIGH (op
) : CONST_DOUBLE_LOW (op
));
1310 #endif /* no REAL_ARITHMETIC */
1312 /* Single word float is a little harder, since single- and double-word
1313 values often do not have the same high-order bits. We have already
1314 verified that we want the only defined word of the single-word value. */
1315 #ifdef REAL_ARITHMETIC
1316 if (GET_MODE_CLASS (mode
) == MODE_FLOAT
1317 && GET_MODE_BITSIZE (mode
) == 32
1318 && GET_CODE (op
) == CONST_DOUBLE
)
1323 REAL_VALUE_FROM_CONST_DOUBLE (rv
, op
);
1324 REAL_VALUE_TO_TARGET_SINGLE (rv
, l
);
1326 /* Sign extend from known 32-bit value to HOST_WIDE_INT. */
1328 val
= ((val
& 0xffffffff) ^ 0x80000000) - 0x80000000;
1330 if (BITS_PER_WORD
== 16)
1332 if ((i
& 1) == !WORDS_BIG_ENDIAN
)
1337 return GEN_INT (val
);
1340 if (((HOST_FLOAT_FORMAT
== TARGET_FLOAT_FORMAT
1341 && HOST_BITS_PER_WIDE_INT
== BITS_PER_WORD
)
1342 || flag_pretend_float
)
1343 && sizeof (float) * 8 == HOST_BITS_PER_WIDE_INT
1344 && GET_MODE_CLASS (mode
) == MODE_FLOAT
1345 && GET_MODE_SIZE (mode
) == UNITS_PER_WORD
1346 && GET_CODE (op
) == CONST_DOUBLE
)
1349 union {float f
; HOST_WIDE_INT i
; } u
;
1351 REAL_VALUE_FROM_CONST_DOUBLE (d
, op
);
1354 return GEN_INT (u
.i
);
1356 if (((HOST_FLOAT_FORMAT
== TARGET_FLOAT_FORMAT
1357 && HOST_BITS_PER_WIDE_INT
== BITS_PER_WORD
)
1358 || flag_pretend_float
)
1359 && sizeof (double) * 8 == HOST_BITS_PER_WIDE_INT
1360 && GET_MODE_CLASS (mode
) == MODE_FLOAT
1361 && GET_MODE_SIZE (mode
) == UNITS_PER_WORD
1362 && GET_CODE (op
) == CONST_DOUBLE
)
1365 union {double d
; HOST_WIDE_INT i
; } u
;
1367 REAL_VALUE_FROM_CONST_DOUBLE (d
, op
);
1370 return GEN_INT (u
.i
);
1372 #endif /* no REAL_ARITHMETIC */
1374 /* The only remaining cases that we can handle are integers.
1375 Convert to proper endianness now since these cases need it.
1376 At this point, i == 0 means the low-order word.
1378 We do not want to handle the case when BITS_PER_WORD <= HOST_BITS_PER_INT
1379 in general. However, if OP is (const_int 0), we can just return
1382 if (op
== const0_rtx
)
1385 if (GET_MODE_CLASS (mode
) != MODE_INT
1386 || (GET_CODE (op
) != CONST_INT
&& GET_CODE (op
) != CONST_DOUBLE
)
1387 || BITS_PER_WORD
> HOST_BITS_PER_WIDE_INT
)
1390 if (WORDS_BIG_ENDIAN
)
1391 i
= GET_MODE_SIZE (mode
) / UNITS_PER_WORD
- 1 - i
;
1393 /* Find out which word on the host machine this value is in and get
1394 it from the constant. */
1395 val
= (i
/ size_ratio
== 0
1396 ? (GET_CODE (op
) == CONST_INT
? INTVAL (op
) : CONST_DOUBLE_LOW (op
))
1397 : (GET_CODE (op
) == CONST_INT
1398 ? (INTVAL (op
) < 0 ? ~0 : 0) : CONST_DOUBLE_HIGH (op
)));
1400 /* Get the value we want into the low bits of val. */
1401 if (BITS_PER_WORD
< HOST_BITS_PER_WIDE_INT
)
1402 val
= ((val
>> ((i
% size_ratio
) * BITS_PER_WORD
)));
1404 val
= trunc_int_for_mode (val
, word_mode
);
1406 return GEN_INT (val
);
1409 /* Similar to `operand_subword', but never return 0. If we can't extract
1410 the required subword, put OP into a register and try again. If that fails,
1411 abort. We always validate the address in this case. It is not valid
1412 to call this function after reload; it is mostly meant for RTL
1415 MODE is the mode of OP, in case it is CONST_INT. */
1418 operand_subword_force (op
, i
, mode
)
1421 enum machine_mode mode
;
1423 rtx result
= operand_subword (op
, i
, 1, mode
);
1428 if (mode
!= BLKmode
&& mode
!= VOIDmode
)
1430 /* If this is a register which can not be accessed by words, copy it
1431 to a pseudo register. */
1432 if (GET_CODE (op
) == REG
)
1433 op
= copy_to_reg (op
);
1435 op
= force_reg (mode
, op
);
1438 result
= operand_subword (op
, i
, 1, mode
);
1445 /* Given a compare instruction, swap the operands.
1446 A test instruction is changed into a compare of 0 against the operand. */
1449 reverse_comparison (insn
)
1452 rtx body
= PATTERN (insn
);
1455 if (GET_CODE (body
) == SET
)
1456 comp
= SET_SRC (body
);
1458 comp
= SET_SRC (XVECEXP (body
, 0, 0));
1460 if (GET_CODE (comp
) == COMPARE
)
1462 rtx op0
= XEXP (comp
, 0);
1463 rtx op1
= XEXP (comp
, 1);
1464 XEXP (comp
, 0) = op1
;
1465 XEXP (comp
, 1) = op0
;
1469 rtx
new = gen_rtx_COMPARE (VOIDmode
,
1470 CONST0_RTX (GET_MODE (comp
)), comp
);
1471 if (GET_CODE (body
) == SET
)
1472 SET_SRC (body
) = new;
1474 SET_SRC (XVECEXP (body
, 0, 0)) = new;
1478 /* Return a memory reference like MEMREF, but with its mode changed
1479 to MODE and its address changed to ADDR.
1480 (VOIDmode means don't change the mode.
1481 NULL for ADDR means don't change the address.) */
1484 change_address (memref
, mode
, addr
)
1486 enum machine_mode mode
;
1491 if (GET_CODE (memref
) != MEM
)
1493 if (mode
== VOIDmode
)
1494 mode
= GET_MODE (memref
);
1496 addr
= XEXP (memref
, 0);
1498 /* If reload is in progress or has completed, ADDR must be valid.
1499 Otherwise, we can call memory_address to make it valid. */
1500 if (reload_completed
|| reload_in_progress
)
1502 if (! memory_address_p (mode
, addr
))
1506 addr
= memory_address (mode
, addr
);
1508 if (rtx_equal_p (addr
, XEXP (memref
, 0)) && mode
== GET_MODE (memref
))
1511 new = gen_rtx_MEM (mode
, addr
);
1512 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (memref
);
1513 MEM_COPY_ATTRIBUTES (new, memref
);
1514 MEM_ALIAS_SET (new) = MEM_ALIAS_SET (memref
);
1518 /* Return a newly created CODE_LABEL rtx with a unique label number. */
1525 label
= gen_rtx_CODE_LABEL (VOIDmode
, 0, NULL_RTX
,
1526 NULL_RTX
, label_num
++, NULL_PTR
, NULL_PTR
);
1528 LABEL_NUSES (label
) = 0;
1529 LABEL_ALTERNATE_NAME (label
) = NULL
;
1533 /* For procedure integration. */
1535 /* Install new pointers to the first and last insns in the chain.
1536 Also, set cur_insn_uid to one higher than the last in use.
1537 Used for an inline-procedure after copying the insn chain. */
1540 set_new_first_and_last_insn (first
, last
)
1549 for (insn
= first
; insn
; insn
= NEXT_INSN (insn
))
1550 cur_insn_uid
= MAX (cur_insn_uid
, INSN_UID (insn
));
1555 /* Set the range of label numbers found in the current function.
1556 This is used when belatedly compiling an inline function. */
1559 set_new_first_and_last_label_num (first
, last
)
1562 base_label_num
= label_num
;
1563 first_label_num
= first
;
1564 last_label_num
= last
;
1567 /* Set the last label number found in the current function.
1568 This is used when belatedly compiling an inline function. */
1571 set_new_last_label_num (last
)
1574 base_label_num
= label_num
;
1575 last_label_num
= last
;
1578 /* Restore all variables describing the current status from the structure *P.
1579 This is used after a nested function. */
1582 restore_emit_status (p
)
1586 clear_emit_caches ();
1589 /* Clear out all parts of the state in F that can safely be discarded
1590 after the function has been compiled, to let garbage collection
1591 reclaim the memory. */
1594 free_emit_status (f
)
1597 free (f
->emit
->x_regno_reg_rtx
);
1598 free (f
->emit
->regno_pointer_flag
);
1599 free (f
->emit
->regno_pointer_align
);
1604 /* Go through all the RTL insn bodies and copy any invalid shared structure.
1605 It does not work to do this twice, because the mark bits set here
1606 are not cleared afterwards. */
1609 unshare_all_rtl (insn
)
1612 for (; insn
; insn
= NEXT_INSN (insn
))
1613 if (GET_CODE (insn
) == INSN
|| GET_CODE (insn
) == JUMP_INSN
1614 || GET_CODE (insn
) == CALL_INSN
)
1616 PATTERN (insn
) = copy_rtx_if_shared (PATTERN (insn
));
1617 REG_NOTES (insn
) = copy_rtx_if_shared (REG_NOTES (insn
));
1618 LOG_LINKS (insn
) = copy_rtx_if_shared (LOG_LINKS (insn
));
1621 /* Make sure the addresses of stack slots found outside the insn chain
1622 (such as, in DECL_RTL of a variable) are not shared
1623 with the insn chain.
1625 This special care is necessary when the stack slot MEM does not
1626 actually appear in the insn chain. If it does appear, its address
1627 is unshared from all else at that point. */
1629 copy_rtx_if_shared (stack_slot_list
);
1632 /* Mark ORIG as in use, and return a copy of it if it was already in use.
1633 Recursively does the same for subexpressions. */
1636 copy_rtx_if_shared (orig
)
1639 register rtx x
= orig
;
1641 register enum rtx_code code
;
1642 register const char *format_ptr
;
1648 code
= GET_CODE (x
);
1650 /* These types may be freely shared. */
1663 /* SCRATCH must be shared because they represent distinct values. */
1667 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
1668 a LABEL_REF, it isn't sharable. */
1669 if (GET_CODE (XEXP (x
, 0)) == PLUS
1670 && GET_CODE (XEXP (XEXP (x
, 0), 0)) == SYMBOL_REF
1671 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == CONST_INT
)
1680 /* The chain of insns is not being copied. */
1684 /* A MEM is allowed to be shared if its address is constant.
1686 We used to allow sharing of MEMs which referenced
1687 virtual_stack_vars_rtx or virtual_incoming_args_rtx, but
1688 that can lose. instantiate_virtual_regs will not unshare
1689 the MEMs, and combine may change the structure of the address
1690 because it looks safe and profitable in one context, but
1691 in some other context it creates unrecognizable RTL. */
1692 if (CONSTANT_ADDRESS_P (XEXP (x
, 0)))
1701 /* This rtx may not be shared. If it has already been seen,
1702 replace it with a copy of itself. */
1708 copy
= rtx_alloc (code
);
1709 bcopy ((char *) x
, (char *) copy
,
1710 (sizeof (*copy
) - sizeof (copy
->fld
)
1711 + sizeof (copy
->fld
[0]) * GET_RTX_LENGTH (code
)));
1717 /* Now scan the subexpressions recursively.
1718 We can store any replaced subexpressions directly into X
1719 since we know X is not shared! Any vectors in X
1720 must be copied if X was copied. */
1722 format_ptr
= GET_RTX_FORMAT (code
);
1724 for (i
= 0; i
< GET_RTX_LENGTH (code
); i
++)
1726 switch (*format_ptr
++)
1729 XEXP (x
, i
) = copy_rtx_if_shared (XEXP (x
, i
));
1733 if (XVEC (x
, i
) != NULL
)
1736 int len
= XVECLEN (x
, i
);
1738 if (copied
&& len
> 0)
1739 XVEC (x
, i
) = gen_rtvec_v (len
, XVEC (x
, i
)->elem
);
1740 for (j
= 0; j
< len
; j
++)
1741 XVECEXP (x
, i
, j
) = copy_rtx_if_shared (XVECEXP (x
, i
, j
));
1749 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
1750 to look for shared sub-parts. */
1753 reset_used_flags (x
)
1757 register enum rtx_code code
;
1758 register const char *format_ptr
;
1763 code
= GET_CODE (x
);
1765 /* These types may be freely shared so we needn't do any resetting
1786 /* The chain of insns is not being copied. */
1795 format_ptr
= GET_RTX_FORMAT (code
);
1796 for (i
= 0; i
< GET_RTX_LENGTH (code
); i
++)
1798 switch (*format_ptr
++)
1801 reset_used_flags (XEXP (x
, i
));
1805 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
1806 reset_used_flags (XVECEXP (x
, i
, j
));
1812 /* Copy X if necessary so that it won't be altered by changes in OTHER.
1813 Return X or the rtx for the pseudo reg the value of X was copied into.
1814 OTHER must be valid as a SET_DEST. */
1817 make_safe_from (x
, other
)
1821 switch (GET_CODE (other
))
1824 other
= SUBREG_REG (other
);
1826 case STRICT_LOW_PART
:
1829 other
= XEXP (other
, 0);
1835 if ((GET_CODE (other
) == MEM
1837 && GET_CODE (x
) != REG
1838 && GET_CODE (x
) != SUBREG
)
1839 || (GET_CODE (other
) == REG
1840 && (REGNO (other
) < FIRST_PSEUDO_REGISTER
1841 || reg_mentioned_p (other
, x
))))
1843 rtx temp
= gen_reg_rtx (GET_MODE (x
));
1844 emit_move_insn (temp
, x
);
1850 /* Emission of insns (adding them to the doubly-linked list). */
1852 /* Return the first insn of the current sequence or current function. */
1860 /* Return the last insn emitted in current sequence or current function. */
1868 /* Specify a new insn as the last in the chain. */
1871 set_last_insn (insn
)
1874 if (NEXT_INSN (insn
) != 0)
1879 /* Return the last insn emitted, even if it is in a sequence now pushed. */
1882 get_last_insn_anywhere ()
1884 struct sequence_stack
*stack
;
1887 for (stack
= seq_stack
; stack
; stack
= stack
->next
)
1888 if (stack
->last
!= 0)
1893 /* Return a number larger than any instruction's uid in this function. */
1898 return cur_insn_uid
;
1901 /* Renumber instructions so that no instruction UIDs are wasted. */
1904 renumber_insns (stream
)
1908 int old_max_uid
= cur_insn_uid
;
1910 /* If we're not supposed to renumber instructions, don't. */
1911 if (!flag_renumber_insns
)
1914 /* If there aren't that many instructions, then it's not really
1915 worth renumbering them. */
1916 if (flag_renumber_insns
== 1 && get_max_uid () < 25000)
1921 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
1924 fprintf (stream
, "Renumbering insn %d to %d\n",
1925 INSN_UID (insn
), cur_insn_uid
);
1926 INSN_UID (insn
) = cur_insn_uid
++;
1930 /* Return the next insn. If it is a SEQUENCE, return the first insn
1939 insn
= NEXT_INSN (insn
);
1940 if (insn
&& GET_CODE (insn
) == INSN
1941 && GET_CODE (PATTERN (insn
)) == SEQUENCE
)
1942 insn
= XVECEXP (PATTERN (insn
), 0, 0);
1948 /* Return the previous insn. If it is a SEQUENCE, return the last insn
1952 previous_insn (insn
)
1957 insn
= PREV_INSN (insn
);
1958 if (insn
&& GET_CODE (insn
) == INSN
1959 && GET_CODE (PATTERN (insn
)) == SEQUENCE
)
1960 insn
= XVECEXP (PATTERN (insn
), 0, XVECLEN (PATTERN (insn
), 0) - 1);
1966 /* Return the next insn after INSN that is not a NOTE. This routine does not
1967 look inside SEQUENCEs. */
1970 next_nonnote_insn (insn
)
1975 insn
= NEXT_INSN (insn
);
1976 if (insn
== 0 || GET_CODE (insn
) != NOTE
)
1983 /* Return the previous insn before INSN that is not a NOTE. This routine does
1984 not look inside SEQUENCEs. */
1987 prev_nonnote_insn (insn
)
1992 insn
= PREV_INSN (insn
);
1993 if (insn
== 0 || GET_CODE (insn
) != NOTE
)
2000 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
2001 or 0, if there is none. This routine does not look inside
2005 next_real_insn (insn
)
2010 insn
= NEXT_INSN (insn
);
2011 if (insn
== 0 || GET_CODE (insn
) == INSN
2012 || GET_CODE (insn
) == CALL_INSN
|| GET_CODE (insn
) == JUMP_INSN
)
2019 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
2020 or 0, if there is none. This routine does not look inside
2024 prev_real_insn (insn
)
2029 insn
= PREV_INSN (insn
);
2030 if (insn
== 0 || GET_CODE (insn
) == INSN
|| GET_CODE (insn
) == CALL_INSN
2031 || GET_CODE (insn
) == JUMP_INSN
)
2038 /* Find the next insn after INSN that really does something. This routine
2039 does not look inside SEQUENCEs. Until reload has completed, this is the
2040 same as next_real_insn. */
2043 next_active_insn (insn
)
2048 insn
= NEXT_INSN (insn
);
2050 || GET_CODE (insn
) == CALL_INSN
|| GET_CODE (insn
) == JUMP_INSN
2051 || (GET_CODE (insn
) == INSN
2052 && (! reload_completed
2053 || (GET_CODE (PATTERN (insn
)) != USE
2054 && GET_CODE (PATTERN (insn
)) != CLOBBER
))))
2061 /* Find the last insn before INSN that really does something. This routine
2062 does not look inside SEQUENCEs. Until reload has completed, this is the
2063 same as prev_real_insn. */
2066 prev_active_insn (insn
)
2071 insn
= PREV_INSN (insn
);
2073 || GET_CODE (insn
) == CALL_INSN
|| GET_CODE (insn
) == JUMP_INSN
2074 || (GET_CODE (insn
) == INSN
2075 && (! reload_completed
2076 || (GET_CODE (PATTERN (insn
)) != USE
2077 && GET_CODE (PATTERN (insn
)) != CLOBBER
))))
2084 /* Return the next CODE_LABEL after the insn INSN, or 0 if there is none. */
2092 insn
= NEXT_INSN (insn
);
2093 if (insn
== 0 || GET_CODE (insn
) == CODE_LABEL
)
2100 /* Return the last CODE_LABEL before the insn INSN, or 0 if there is none. */
2108 insn
= PREV_INSN (insn
);
2109 if (insn
== 0 || GET_CODE (insn
) == CODE_LABEL
)
2117 /* INSN uses CC0 and is being moved into a delay slot. Set up REG_CC_SETTER
2118 and REG_CC_USER notes so we can find it. */
2121 link_cc0_insns (insn
)
2124 rtx user
= next_nonnote_insn (insn
);
2126 if (GET_CODE (user
) == INSN
&& GET_CODE (PATTERN (user
)) == SEQUENCE
)
2127 user
= XVECEXP (PATTERN (user
), 0, 0);
2129 REG_NOTES (user
) = gen_rtx_INSN_LIST (REG_CC_SETTER
, insn
,
2131 REG_NOTES (insn
) = gen_rtx_INSN_LIST (REG_CC_USER
, user
, REG_NOTES (insn
));
2134 /* Return the next insn that uses CC0 after INSN, which is assumed to
2135 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
2136 applied to the result of this function should yield INSN).
2138 Normally, this is simply the next insn. However, if a REG_CC_USER note
2139 is present, it contains the insn that uses CC0.
2141 Return 0 if we can't find the insn. */
2144 next_cc0_user (insn
)
2147 rtx note
= find_reg_note (insn
, REG_CC_USER
, NULL_RTX
);
2150 return XEXP (note
, 0);
2152 insn
= next_nonnote_insn (insn
);
2153 if (insn
&& GET_CODE (insn
) == INSN
&& GET_CODE (PATTERN (insn
)) == SEQUENCE
)
2154 insn
= XVECEXP (PATTERN (insn
), 0, 0);
2156 if (insn
&& GET_RTX_CLASS (GET_CODE (insn
)) == 'i'
2157 && reg_mentioned_p (cc0_rtx
, PATTERN (insn
)))
2163 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
2164 note, it is the previous insn. */
2167 prev_cc0_setter (insn
)
2170 rtx note
= find_reg_note (insn
, REG_CC_SETTER
, NULL_RTX
);
2173 return XEXP (note
, 0);
2175 insn
= prev_nonnote_insn (insn
);
2176 if (! sets_cc0_p (PATTERN (insn
)))
2183 /* Try splitting insns that can be split for better scheduling.
2184 PAT is the pattern which might split.
2185 TRIAL is the insn providing PAT.
2186 LAST is non-zero if we should return the last insn of the sequence produced.
2188 If this routine succeeds in splitting, it returns the first or last
2189 replacement insn depending on the value of LAST. Otherwise, it
2190 returns TRIAL. If the insn to be returned can be split, it will be. */
2193 try_split (pat
, trial
, last
)
2197 rtx before
= PREV_INSN (trial
);
2198 rtx after
= NEXT_INSN (trial
);
2199 rtx seq
= split_insns (pat
, trial
);
2200 int has_barrier
= 0;
2203 /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
2204 We may need to handle this specially. */
2205 if (after
&& GET_CODE (after
) == BARRIER
)
2208 after
= NEXT_INSN (after
);
2213 /* SEQ can either be a SEQUENCE or the pattern of a single insn.
2214 The latter case will normally arise only when being done so that
2215 it, in turn, will be split (SFmode on the 29k is an example). */
2216 if (GET_CODE (seq
) == SEQUENCE
)
2218 /* If we are splitting a JUMP_INSN, look for the JUMP_INSN in
2219 SEQ and copy our JUMP_LABEL to it. If JUMP_LABEL is non-zero,
2220 increment the usage count so we don't delete the label. */
2223 if (GET_CODE (trial
) == JUMP_INSN
)
2224 for (i
= XVECLEN (seq
, 0) - 1; i
>= 0; i
--)
2225 if (GET_CODE (XVECEXP (seq
, 0, i
)) == JUMP_INSN
)
2227 JUMP_LABEL (XVECEXP (seq
, 0, i
)) = JUMP_LABEL (trial
);
2229 if (JUMP_LABEL (trial
))
2230 LABEL_NUSES (JUMP_LABEL (trial
))++;
2233 tem
= emit_insn_after (seq
, before
);
2235 delete_insn (trial
);
2237 emit_barrier_after (tem
);
2239 /* Recursively call try_split for each new insn created; by the
2240 time control returns here that insn will be fully split, so
2241 set LAST and continue from the insn after the one returned.
2242 We can't use next_active_insn here since AFTER may be a note.
2243 Ignore deleted insns, which can be occur if not optimizing. */
2244 for (tem
= NEXT_INSN (before
); tem
!= after
;
2245 tem
= NEXT_INSN (tem
))
2246 if (! INSN_DELETED_P (tem
)
2247 && GET_RTX_CLASS (GET_CODE (tem
)) == 'i')
2248 tem
= try_split (PATTERN (tem
), tem
, 1);
2250 /* Avoid infinite loop if the result matches the original pattern. */
2251 else if (rtx_equal_p (seq
, pat
))
2255 PATTERN (trial
) = seq
;
2256 INSN_CODE (trial
) = -1;
2257 try_split (seq
, trial
, last
);
2260 /* Return either the first or the last insn, depending on which was
2262 return last
? prev_active_insn (after
) : next_active_insn (before
);
2268 /* Make and return an INSN rtx, initializing all its slots.
2269 Store PATTERN in the pattern slots. */
2272 make_insn_raw (pattern
)
2277 /* If in RTL generation phase, see if FREE_INSN can be used. */
2278 if (!ggc_p
&& free_insn
!= 0 && rtx_equal_function_value_matters
)
2281 free_insn
= NEXT_INSN (free_insn
);
2282 PUT_CODE (insn
, INSN
);
2285 insn
= rtx_alloc (INSN
);
2287 INSN_UID (insn
) = cur_insn_uid
++;
2288 PATTERN (insn
) = pattern
;
2289 INSN_CODE (insn
) = -1;
2290 LOG_LINKS (insn
) = NULL
;
2291 REG_NOTES (insn
) = NULL
;
2296 /* Like `make_insn' but make a JUMP_INSN instead of an insn. */
2299 make_jump_insn_raw (pattern
)
2304 insn
= rtx_alloc (JUMP_INSN
);
2305 INSN_UID (insn
) = cur_insn_uid
++;
2307 PATTERN (insn
) = pattern
;
2308 INSN_CODE (insn
) = -1;
2309 LOG_LINKS (insn
) = NULL
;
2310 REG_NOTES (insn
) = NULL
;
2311 JUMP_LABEL (insn
) = NULL
;
2316 /* Like `make_insn' but make a CALL_INSN instead of an insn. */
2319 make_call_insn_raw (pattern
)
2324 insn
= rtx_alloc (CALL_INSN
);
2325 INSN_UID (insn
) = cur_insn_uid
++;
2327 PATTERN (insn
) = pattern
;
2328 INSN_CODE (insn
) = -1;
2329 LOG_LINKS (insn
) = NULL
;
2330 REG_NOTES (insn
) = NULL
;
2331 CALL_INSN_FUNCTION_USAGE (insn
) = NULL
;
2336 /* Add INSN to the end of the doubly-linked list.
2337 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
2343 PREV_INSN (insn
) = last_insn
;
2344 NEXT_INSN (insn
) = 0;
2346 if (NULL
!= last_insn
)
2347 NEXT_INSN (last_insn
) = insn
;
2349 if (NULL
== first_insn
)
2355 /* Add INSN into the doubly-linked list after insn AFTER. This and
2356 the next should be the only functions called to insert an insn once
2357 delay slots have been filled since only they know how to update a
2361 add_insn_after (insn
, after
)
2364 rtx next
= NEXT_INSN (after
);
2366 if (optimize
&& INSN_DELETED_P (after
))
2369 NEXT_INSN (insn
) = next
;
2370 PREV_INSN (insn
) = after
;
2374 PREV_INSN (next
) = insn
;
2375 if (GET_CODE (next
) == INSN
&& GET_CODE (PATTERN (next
)) == SEQUENCE
)
2376 PREV_INSN (XVECEXP (PATTERN (next
), 0, 0)) = insn
;
2378 else if (last_insn
== after
)
2382 struct sequence_stack
*stack
= seq_stack
;
2383 /* Scan all pending sequences too. */
2384 for (; stack
; stack
= stack
->next
)
2385 if (after
== stack
->last
)
2395 NEXT_INSN (after
) = insn
;
2396 if (GET_CODE (after
) == INSN
&& GET_CODE (PATTERN (after
)) == SEQUENCE
)
2398 rtx sequence
= PATTERN (after
);
2399 NEXT_INSN (XVECEXP (sequence
, 0, XVECLEN (sequence
, 0) - 1)) = insn
;
2403 /* Add INSN into the doubly-linked list before insn BEFORE. This and
2404 the previous should be the only functions called to insert an insn once
2405 delay slots have been filled since only they know how to update a
2409 add_insn_before (insn
, before
)
2412 rtx prev
= PREV_INSN (before
);
2414 if (optimize
&& INSN_DELETED_P (before
))
2417 PREV_INSN (insn
) = prev
;
2418 NEXT_INSN (insn
) = before
;
2422 NEXT_INSN (prev
) = insn
;
2423 if (GET_CODE (prev
) == INSN
&& GET_CODE (PATTERN (prev
)) == SEQUENCE
)
2425 rtx sequence
= PATTERN (prev
);
2426 NEXT_INSN (XVECEXP (sequence
, 0, XVECLEN (sequence
, 0) - 1)) = insn
;
2429 else if (first_insn
== before
)
2433 struct sequence_stack
*stack
= seq_stack
;
2434 /* Scan all pending sequences too. */
2435 for (; stack
; stack
= stack
->next
)
2436 if (before
== stack
->first
)
2438 stack
->first
= insn
;
2446 PREV_INSN (before
) = insn
;
2447 if (GET_CODE (before
) == INSN
&& GET_CODE (PATTERN (before
)) == SEQUENCE
)
2448 PREV_INSN (XVECEXP (PATTERN (before
), 0, 0)) = insn
;
2451 /* Remove an insn from its doubly-linked list. This function knows how
2452 to handle sequences. */
2457 rtx next
= NEXT_INSN (insn
);
2458 rtx prev
= PREV_INSN (insn
);
2461 NEXT_INSN (prev
) = next
;
2462 if (GET_CODE (prev
) == INSN
&& GET_CODE (PATTERN (prev
)) == SEQUENCE
)
2464 rtx sequence
= PATTERN (prev
);
2465 NEXT_INSN (XVECEXP (sequence
, 0, XVECLEN (sequence
, 0) - 1)) = next
;
2468 else if (first_insn
== insn
)
2472 struct sequence_stack
*stack
= seq_stack
;
2473 /* Scan all pending sequences too. */
2474 for (; stack
; stack
= stack
->next
)
2475 if (insn
== stack
->first
)
2477 stack
->first
= next
;
2487 PREV_INSN (next
) = prev
;
2488 if (GET_CODE (next
) == INSN
&& GET_CODE (PATTERN (next
)) == SEQUENCE
)
2489 PREV_INSN (XVECEXP (PATTERN (next
), 0, 0)) = prev
;
2491 else if (last_insn
== insn
)
2495 struct sequence_stack
*stack
= seq_stack
;
2496 /* Scan all pending sequences too. */
2497 for (; stack
; stack
= stack
->next
)
2498 if (insn
== stack
->last
)
2509 /* Delete all insns made since FROM.
2510 FROM becomes the new last instruction. */
2513 delete_insns_since (from
)
2519 NEXT_INSN (from
) = 0;
2523 /* This function is deprecated, please use sequences instead.
2525 Move a consecutive bunch of insns to a different place in the chain.
2526 The insns to be moved are those between FROM and TO.
2527 They are moved to a new position after the insn AFTER.
2528 AFTER must not be FROM or TO or any insn in between.
2530 This function does not know about SEQUENCEs and hence should not be
2531 called after delay-slot filling has been done. */
2534 reorder_insns (from
, to
, after
)
2535 rtx from
, to
, after
;
2537 /* Splice this bunch out of where it is now. */
2538 if (PREV_INSN (from
))
2539 NEXT_INSN (PREV_INSN (from
)) = NEXT_INSN (to
);
2541 PREV_INSN (NEXT_INSN (to
)) = PREV_INSN (from
);
2542 if (last_insn
== to
)
2543 last_insn
= PREV_INSN (from
);
2544 if (first_insn
== from
)
2545 first_insn
= NEXT_INSN (to
);
2547 /* Make the new neighbors point to it and it to them. */
2548 if (NEXT_INSN (after
))
2549 PREV_INSN (NEXT_INSN (after
)) = to
;
2551 NEXT_INSN (to
) = NEXT_INSN (after
);
2552 PREV_INSN (from
) = after
;
2553 NEXT_INSN (after
) = from
;
2554 if (after
== last_insn
)
2558 /* Return the line note insn preceding INSN. */
2561 find_line_note (insn
)
2564 if (no_line_numbers
)
2567 for (; insn
; insn
= PREV_INSN (insn
))
2568 if (GET_CODE (insn
) == NOTE
2569 && NOTE_LINE_NUMBER (insn
) >= 0)
2575 /* Like reorder_insns, but inserts line notes to preserve the line numbers
2576 of the moved insns when debugging. This may insert a note between AFTER
2577 and FROM, and another one after TO. */
2580 reorder_insns_with_line_notes (from
, to
, after
)
2581 rtx from
, to
, after
;
2583 rtx from_line
= find_line_note (from
);
2584 rtx after_line
= find_line_note (after
);
2586 reorder_insns (from
, to
, after
);
2588 if (from_line
== after_line
)
2592 emit_line_note_after (NOTE_SOURCE_FILE (from_line
),
2593 NOTE_LINE_NUMBER (from_line
),
2596 emit_line_note_after (NOTE_SOURCE_FILE (after_line
),
2597 NOTE_LINE_NUMBER (after_line
),
2601 /* Remove unncessary notes from the instruction stream. */
2604 remove_unncessary_notes ()
2609 /* Remove NOTE_INSN_DELETED notes. We must not remove the first
2610 instruction in the function because the compiler depends on the
2611 first instruction being a note. */
2612 for (insn
= NEXT_INSN (get_insns ()); insn
; insn
= next
)
2614 /* Remember what's next. */
2615 next
= NEXT_INSN (insn
);
2617 /* We're only interested in notes. */
2618 if (GET_CODE (insn
) != NOTE
)
2621 if (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_DELETED
)
2627 /* Emit an insn of given code and pattern
2628 at a specified place within the doubly-linked list. */
2630 /* Make an instruction with body PATTERN
2631 and output it before the instruction BEFORE. */
2634 emit_insn_before (pattern
, before
)
2635 register rtx pattern
, before
;
2637 register rtx insn
= before
;
2639 if (GET_CODE (pattern
) == SEQUENCE
)
2643 for (i
= 0; i
< XVECLEN (pattern
, 0); i
++)
2645 insn
= XVECEXP (pattern
, 0, i
);
2646 add_insn_before (insn
, before
);
2648 if (!ggc_p
&& XVECLEN (pattern
, 0) < SEQUENCE_RESULT_SIZE
)
2649 sequence_result
[XVECLEN (pattern
, 0)] = pattern
;
2653 insn
= make_insn_raw (pattern
);
2654 add_insn_before (insn
, before
);
2660 /* Similar to emit_insn_before, but update basic block boundaries as well. */
2663 emit_block_insn_before (pattern
, before
, block
)
2664 rtx pattern
, before
;
2667 rtx prev
= PREV_INSN (before
);
2668 rtx r
= emit_insn_before (pattern
, before
);
2669 if (block
&& block
->head
== before
)
2670 block
->head
= NEXT_INSN (prev
);
2674 /* Make an instruction with body PATTERN and code JUMP_INSN
2675 and output it before the instruction BEFORE. */
2678 emit_jump_insn_before (pattern
, before
)
2679 register rtx pattern
, before
;
2683 if (GET_CODE (pattern
) == SEQUENCE
)
2684 insn
= emit_insn_before (pattern
, before
);
2687 insn
= make_jump_insn_raw (pattern
);
2688 add_insn_before (insn
, before
);
2694 /* Make an instruction with body PATTERN and code CALL_INSN
2695 and output it before the instruction BEFORE. */
2698 emit_call_insn_before (pattern
, before
)
2699 register rtx pattern
, before
;
2703 if (GET_CODE (pattern
) == SEQUENCE
)
2704 insn
= emit_insn_before (pattern
, before
);
2707 insn
= make_call_insn_raw (pattern
);
2708 add_insn_before (insn
, before
);
2709 PUT_CODE (insn
, CALL_INSN
);
2715 /* Make an insn of code BARRIER
2716 and output it before the insn BEFORE. */
2719 emit_barrier_before (before
)
2720 register rtx before
;
2722 register rtx insn
= rtx_alloc (BARRIER
);
2724 INSN_UID (insn
) = cur_insn_uid
++;
2726 add_insn_before (insn
, before
);
2730 /* Emit the label LABEL before the insn BEFORE. */
2733 emit_label_before (label
, before
)
2736 /* This can be called twice for the same label as a result of the
2737 confusion that follows a syntax error! So make it harmless. */
2738 if (INSN_UID (label
) == 0)
2740 INSN_UID (label
) = cur_insn_uid
++;
2741 add_insn_before (label
, before
);
2747 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
2750 emit_note_before (subtype
, before
)
2754 register rtx note
= rtx_alloc (NOTE
);
2755 INSN_UID (note
) = cur_insn_uid
++;
2756 NOTE_SOURCE_FILE (note
) = 0;
2757 NOTE_LINE_NUMBER (note
) = subtype
;
2759 add_insn_before (note
, before
);
2763 /* Make an insn of code INSN with body PATTERN
2764 and output it after the insn AFTER. */
2767 emit_insn_after (pattern
, after
)
2768 register rtx pattern
, after
;
2770 register rtx insn
= after
;
2772 if (GET_CODE (pattern
) == SEQUENCE
)
2776 for (i
= 0; i
< XVECLEN (pattern
, 0); i
++)
2778 insn
= XVECEXP (pattern
, 0, i
);
2779 add_insn_after (insn
, after
);
2782 if (!ggc_p
&& XVECLEN (pattern
, 0) < SEQUENCE_RESULT_SIZE
)
2783 sequence_result
[XVECLEN (pattern
, 0)] = pattern
;
2787 insn
= make_insn_raw (pattern
);
2788 add_insn_after (insn
, after
);
2794 /* Similar to emit_insn_after, except that line notes are to be inserted so
2795 as to act as if this insn were at FROM. */
2798 emit_insn_after_with_line_notes (pattern
, after
, from
)
2799 rtx pattern
, after
, from
;
2801 rtx from_line
= find_line_note (from
);
2802 rtx after_line
= find_line_note (after
);
2803 rtx insn
= emit_insn_after (pattern
, after
);
2806 emit_line_note_after (NOTE_SOURCE_FILE (from_line
),
2807 NOTE_LINE_NUMBER (from_line
),
2811 emit_line_note_after (NOTE_SOURCE_FILE (after_line
),
2812 NOTE_LINE_NUMBER (after_line
),
2816 /* Similar to emit_insn_after, but update basic block boundaries as well. */
2819 emit_block_insn_after (pattern
, after
, block
)
2823 rtx r
= emit_insn_after (pattern
, after
);
2824 if (block
&& block
->end
== after
)
2829 /* Make an insn of code JUMP_INSN with body PATTERN
2830 and output it after the insn AFTER. */
2833 emit_jump_insn_after (pattern
, after
)
2834 register rtx pattern
, after
;
2838 if (GET_CODE (pattern
) == SEQUENCE
)
2839 insn
= emit_insn_after (pattern
, after
);
2842 insn
= make_jump_insn_raw (pattern
);
2843 add_insn_after (insn
, after
);
2849 /* Make an insn of code BARRIER
2850 and output it after the insn AFTER. */
2853 emit_barrier_after (after
)
2856 register rtx insn
= rtx_alloc (BARRIER
);
2858 INSN_UID (insn
) = cur_insn_uid
++;
2860 add_insn_after (insn
, after
);
2864 /* Emit the label LABEL after the insn AFTER. */
2867 emit_label_after (label
, after
)
2870 /* This can be called twice for the same label
2871 as a result of the confusion that follows a syntax error!
2872 So make it harmless. */
2873 if (INSN_UID (label
) == 0)
2875 INSN_UID (label
) = cur_insn_uid
++;
2876 add_insn_after (label
, after
);
2882 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
2885 emit_note_after (subtype
, after
)
2889 register rtx note
= rtx_alloc (NOTE
);
2890 INSN_UID (note
) = cur_insn_uid
++;
2891 NOTE_SOURCE_FILE (note
) = 0;
2892 NOTE_LINE_NUMBER (note
) = subtype
;
2893 add_insn_after (note
, after
);
2897 /* Emit a line note for FILE and LINE after the insn AFTER. */
2900 emit_line_note_after (file
, line
, after
)
2907 if (no_line_numbers
&& line
> 0)
2913 note
= rtx_alloc (NOTE
);
2914 INSN_UID (note
) = cur_insn_uid
++;
2915 NOTE_SOURCE_FILE (note
) = file
;
2916 NOTE_LINE_NUMBER (note
) = line
;
2917 add_insn_after (note
, after
);
2921 /* Make an insn of code INSN with pattern PATTERN
2922 and add it to the end of the doubly-linked list.
2923 If PATTERN is a SEQUENCE, take the elements of it
2924 and emit an insn for each element.
2926 Returns the last insn emitted. */
2932 rtx insn
= last_insn
;
2934 if (GET_CODE (pattern
) == SEQUENCE
)
2938 for (i
= 0; i
< XVECLEN (pattern
, 0); i
++)
2940 insn
= XVECEXP (pattern
, 0, i
);
2943 if (!ggc_p
&& XVECLEN (pattern
, 0) < SEQUENCE_RESULT_SIZE
)
2944 sequence_result
[XVECLEN (pattern
, 0)] = pattern
;
2948 insn
= make_insn_raw (pattern
);
2955 /* Emit the insns in a chain starting with INSN.
2956 Return the last insn emitted. */
2966 rtx next
= NEXT_INSN (insn
);
2975 /* Emit the insns in a chain starting with INSN and place them in front of
2976 the insn BEFORE. Return the last insn emitted. */
2979 emit_insns_before (insn
, before
)
2987 rtx next
= NEXT_INSN (insn
);
2988 add_insn_before (insn
, before
);
2996 /* Emit the insns in a chain starting with FIRST and place them in back of
2997 the insn AFTER. Return the last insn emitted. */
3000 emit_insns_after (first
, after
)
3005 register rtx after_after
;
3013 for (last
= first
; NEXT_INSN (last
); last
= NEXT_INSN (last
))
3016 after_after
= NEXT_INSN (after
);
3018 NEXT_INSN (after
) = first
;
3019 PREV_INSN (first
) = after
;
3020 NEXT_INSN (last
) = after_after
;
3022 PREV_INSN (after_after
) = last
;
3024 if (after
== last_insn
)
3029 /* Make an insn of code JUMP_INSN with pattern PATTERN
3030 and add it to the end of the doubly-linked list. */
3033 emit_jump_insn (pattern
)
3036 if (GET_CODE (pattern
) == SEQUENCE
)
3037 return emit_insn (pattern
);
3040 register rtx insn
= make_jump_insn_raw (pattern
);
3046 /* Make an insn of code CALL_INSN with pattern PATTERN
3047 and add it to the end of the doubly-linked list. */
3050 emit_call_insn (pattern
)
3053 if (GET_CODE (pattern
) == SEQUENCE
)
3054 return emit_insn (pattern
);
3057 register rtx insn
= make_call_insn_raw (pattern
);
3059 PUT_CODE (insn
, CALL_INSN
);
3064 /* Add the label LABEL to the end of the doubly-linked list. */
3070 /* This can be called twice for the same label
3071 as a result of the confusion that follows a syntax error!
3072 So make it harmless. */
3073 if (INSN_UID (label
) == 0)
3075 INSN_UID (label
) = cur_insn_uid
++;
3081 /* Make an insn of code BARRIER
3082 and add it to the end of the doubly-linked list. */
3087 register rtx barrier
= rtx_alloc (BARRIER
);
3088 INSN_UID (barrier
) = cur_insn_uid
++;
3093 /* Make an insn of code NOTE
3094 with data-fields specified by FILE and LINE
3095 and add it to the end of the doubly-linked list,
3096 but only if line-numbers are desired for debugging info. */
3099 emit_line_note (file
, line
)
3103 set_file_and_line_for_stmt (file
, line
);
3106 if (no_line_numbers
)
3110 return emit_note (file
, line
);
3113 /* Make an insn of code NOTE
3114 with data-fields specified by FILE and LINE
3115 and add it to the end of the doubly-linked list.
3116 If it is a line-number NOTE, omit it if it matches the previous one. */
3119 emit_note (file
, line
)
3127 if (file
&& last_filename
&& !strcmp (file
, last_filename
)
3128 && line
== last_linenum
)
3130 last_filename
= file
;
3131 last_linenum
= line
;
3134 if (no_line_numbers
&& line
> 0)
3140 note
= rtx_alloc (NOTE
);
3141 INSN_UID (note
) = cur_insn_uid
++;
3142 NOTE_SOURCE_FILE (note
) = file
;
3143 NOTE_LINE_NUMBER (note
) = line
;
3148 /* Emit a NOTE, and don't omit it even if LINE is the previous note. */
3151 emit_line_note_force (file
, line
)
3156 return emit_line_note (file
, line
);
3159 /* Cause next statement to emit a line note even if the line number
3160 has not changed. This is used at the beginning of a function. */
3163 force_next_line_note ()
3168 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
3169 note of this type already exists, remove it first. */
3172 set_unique_reg_note (insn
, kind
, datum
)
3177 rtx note
= find_reg_note (insn
, kind
, NULL_RTX
);
3179 /* First remove the note if there already is one. */
3181 remove_note (insn
, note
);
3183 REG_NOTES (insn
) = gen_rtx_EXPR_LIST (kind
, datum
, REG_NOTES (insn
));
3186 /* Return an indication of which type of insn should have X as a body.
3187 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
3193 if (GET_CODE (x
) == CODE_LABEL
)
3195 if (GET_CODE (x
) == CALL
)
3197 if (GET_CODE (x
) == RETURN
)
3199 if (GET_CODE (x
) == SET
)
3201 if (SET_DEST (x
) == pc_rtx
)
3203 else if (GET_CODE (SET_SRC (x
)) == CALL
)
3208 if (GET_CODE (x
) == PARALLEL
)
3211 for (j
= XVECLEN (x
, 0) - 1; j
>= 0; j
--)
3212 if (GET_CODE (XVECEXP (x
, 0, j
)) == CALL
)
3214 else if (GET_CODE (XVECEXP (x
, 0, j
)) == SET
3215 && SET_DEST (XVECEXP (x
, 0, j
)) == pc_rtx
)
3217 else if (GET_CODE (XVECEXP (x
, 0, j
)) == SET
3218 && GET_CODE (SET_SRC (XVECEXP (x
, 0, j
))) == CALL
)
3224 /* Emit the rtl pattern X as an appropriate kind of insn.
3225 If X is a label, it is simply added into the insn chain. */
3231 enum rtx_code code
= classify_insn (x
);
3233 if (code
== CODE_LABEL
)
3234 return emit_label (x
);
3235 else if (code
== INSN
)
3236 return emit_insn (x
);
3237 else if (code
== JUMP_INSN
)
3239 register rtx insn
= emit_jump_insn (x
);
3240 if (simplejump_p (insn
) || GET_CODE (x
) == RETURN
)
3241 return emit_barrier ();
3244 else if (code
== CALL_INSN
)
3245 return emit_call_insn (x
);
3250 /* Begin emitting insns to a sequence which can be packaged in an
3251 RTL_EXPR. If this sequence will contain something that might cause
3252 the compiler to pop arguments to function calls (because those
3253 pops have previously been deferred; see INHIBIT_DEFER_POP for more
3254 details), use do_pending_stack_adjust before calling this function.
3255 That will ensure that the deferred pops are not accidentally
3256 emitted in the middel of this sequence. */
3261 struct sequence_stack
*tem
;
3263 tem
= (struct sequence_stack
*) xmalloc (sizeof (struct sequence_stack
));
3265 tem
->next
= seq_stack
;
3266 tem
->first
= first_insn
;
3267 tem
->last
= last_insn
;
3268 tem
->sequence_rtl_expr
= seq_rtl_expr
;
3276 /* Similarly, but indicate that this sequence will be placed in T, an
3277 RTL_EXPR. See the documentation for start_sequence for more
3278 information about how to use this function. */
3281 start_sequence_for_rtl_expr (t
)
3289 /* Set up the insn chain starting with FIRST as the current sequence,
3290 saving the previously current one. See the documentation for
3291 start_sequence for more information about how to use this function. */
3294 push_to_sequence (first
)
3301 for (last
= first
; last
&& NEXT_INSN (last
); last
= NEXT_INSN (last
));
3307 /* Set up the outer-level insn chain
3308 as the current sequence, saving the previously current one. */
3311 push_topmost_sequence ()
3313 struct sequence_stack
*stack
, *top
= NULL
;
3317 for (stack
= seq_stack
; stack
; stack
= stack
->next
)
3320 first_insn
= top
->first
;
3321 last_insn
= top
->last
;
3322 seq_rtl_expr
= top
->sequence_rtl_expr
;
3325 /* After emitting to the outer-level insn chain, update the outer-level
3326 insn chain, and restore the previous saved state. */
3329 pop_topmost_sequence ()
3331 struct sequence_stack
*stack
, *top
= NULL
;
3333 for (stack
= seq_stack
; stack
; stack
= stack
->next
)
3336 top
->first
= first_insn
;
3337 top
->last
= last_insn
;
3338 /* ??? Why don't we save seq_rtl_expr here? */
3343 /* After emitting to a sequence, restore previous saved state.
3345 To get the contents of the sequence just made, you must call
3346 `gen_sequence' *before* calling here.
3348 If the compiler might have deferred popping arguments while
3349 generating this sequence, and this sequence will not be immediately
3350 inserted into the instruction stream, use do_pending_stack_adjust
3351 before calling gen_sequence. That will ensure that the deferred
3352 pops are inserted into this sequence, and not into some random
3353 location in the instruction stream. See INHIBIT_DEFER_POP for more
3354 information about deferred popping of arguments. */
3359 struct sequence_stack
*tem
= seq_stack
;
3361 first_insn
= tem
->first
;
3362 last_insn
= tem
->last
;
3363 seq_rtl_expr
= tem
->sequence_rtl_expr
;
3364 seq_stack
= tem
->next
;
3369 /* Return 1 if currently emitting into a sequence. */
3374 return seq_stack
!= 0;
3377 /* Generate a SEQUENCE rtx containing the insns already emitted
3378 to the current sequence.
3380 This is how the gen_... function from a DEFINE_EXPAND
3381 constructs the SEQUENCE that it returns. */
3391 /* Count the insns in the chain. */
3393 for (tem
= first_insn
; tem
; tem
= NEXT_INSN (tem
))
3396 /* If only one insn, return it rather than a SEQUENCE.
3397 (Now that we cache SEQUENCE expressions, it isn't worth special-casing
3398 the case of an empty list.)
3399 We only return the pattern of an insn if its code is INSN and it
3400 has no notes. This ensures that no information gets lost. */
3402 && ! RTX_FRAME_RELATED_P (first_insn
)
3403 && GET_CODE (first_insn
) == INSN
3404 /* Don't throw away any reg notes. */
3405 && REG_NOTES (first_insn
) == 0)
3409 NEXT_INSN (first_insn
) = free_insn
;
3410 free_insn
= first_insn
;
3412 return PATTERN (first_insn
);
3415 /* Put them in a vector. See if we already have a SEQUENCE of the
3416 appropriate length around. */
3417 if (!ggc_p
&& len
< SEQUENCE_RESULT_SIZE
3418 && (result
= sequence_result
[len
]) != 0)
3419 sequence_result
[len
] = 0;
3422 /* Ensure that this rtl goes in saveable_obstack, since we may
3424 push_obstacks_nochange ();
3425 rtl_in_saveable_obstack ();
3426 result
= gen_rtx_SEQUENCE (VOIDmode
, rtvec_alloc (len
));
3430 for (i
= 0, tem
= first_insn
; tem
; tem
= NEXT_INSN (tem
), i
++)
3431 XVECEXP (result
, 0, i
) = tem
;
3436 /* Put the various virtual registers into REGNO_REG_RTX. */
3439 init_virtual_regs (es
)
3440 struct emit_status
*es
;
3442 rtx
*ptr
= es
->x_regno_reg_rtx
;
3443 ptr
[VIRTUAL_INCOMING_ARGS_REGNUM
] = virtual_incoming_args_rtx
;
3444 ptr
[VIRTUAL_STACK_VARS_REGNUM
] = virtual_stack_vars_rtx
;
3445 ptr
[VIRTUAL_STACK_DYNAMIC_REGNUM
] = virtual_stack_dynamic_rtx
;
3446 ptr
[VIRTUAL_OUTGOING_ARGS_REGNUM
] = virtual_outgoing_args_rtx
;
3447 ptr
[VIRTUAL_CFA_REGNUM
] = virtual_cfa_rtx
;
3451 clear_emit_caches ()
3455 /* Clear the start_sequence/gen_sequence cache. */
3456 for (i
= 0; i
< SEQUENCE_RESULT_SIZE
; i
++)
3457 sequence_result
[i
] = 0;
3461 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
3462 static rtx copy_insn_scratch_in
[MAX_RECOG_OPERANDS
];
3463 static rtx copy_insn_scratch_out
[MAX_RECOG_OPERANDS
];
3464 static int copy_insn_n_scratches
;
3466 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
3467 copied an ASM_OPERANDS.
3468 In that case, it is the original input-operand vector. */
3469 static rtvec orig_asm_operands_vector
;
3471 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
3472 copied an ASM_OPERANDS.
3473 In that case, it is the copied input-operand vector. */
3474 static rtvec copy_asm_operands_vector
;
3476 /* Likewise for the constraints vector. */
3477 static rtvec orig_asm_constraints_vector
;
3478 static rtvec copy_asm_constraints_vector
;
3480 /* Recursively create a new copy of an rtx for copy_insn.
3481 This function differs from copy_rtx in that it handles SCRATCHes and
3482 ASM_OPERANDs properly.
3483 Normally, this function is not used directly; use copy_insn as front end.
3484 However, you could first copy an insn pattern with copy_insn and then use
3485 this function afterwards to properly copy any REG_NOTEs containing
3494 register RTX_CODE code
;
3495 register const char *format_ptr
;
3497 code
= GET_CODE (orig
);
3513 for (i
= 0; i
< copy_insn_n_scratches
; i
++)
3514 if (copy_insn_scratch_in
[i
] == orig
)
3515 return copy_insn_scratch_out
[i
];
3519 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
3520 a LABEL_REF, it isn't sharable. */
3521 if (GET_CODE (XEXP (orig
, 0)) == PLUS
3522 && GET_CODE (XEXP (XEXP (orig
, 0), 0)) == SYMBOL_REF
3523 && GET_CODE (XEXP (XEXP (orig
, 0), 1)) == CONST_INT
)
3527 /* A MEM with a constant address is not sharable. The problem is that
3528 the constant address may need to be reloaded. If the mem is shared,
3529 then reloading one copy of this mem will cause all copies to appear
3530 to have been reloaded. */
3536 copy
= rtx_alloc (code
);
3538 /* Copy the various flags, and other information. We assume that
3539 all fields need copying, and then clear the fields that should
3540 not be copied. That is the sensible default behavior, and forces
3541 us to explicitly document why we are *not* copying a flag. */
3542 memcpy (copy
, orig
, sizeof (struct rtx_def
) - sizeof (rtunion
));
3544 /* We do not copy the USED flag, which is used as a mark bit during
3545 walks over the RTL. */
3548 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
3549 if (GET_RTX_CLASS (code
) == 'i')
3553 copy
->frame_related
= 0;
3556 format_ptr
= GET_RTX_FORMAT (GET_CODE (copy
));
3558 for (i
= 0; i
< GET_RTX_LENGTH (GET_CODE (copy
)); i
++)
3560 copy
->fld
[i
] = orig
->fld
[i
];
3561 switch (*format_ptr
++)
3564 if (XEXP (orig
, i
) != NULL
)
3565 XEXP (copy
, i
) = copy_insn_1 (XEXP (orig
, i
));
3570 if (XVEC (orig
, i
) == orig_asm_constraints_vector
)
3571 XVEC (copy
, i
) = copy_asm_constraints_vector
;
3572 else if (XVEC (orig
, i
) == orig_asm_operands_vector
)
3573 XVEC (copy
, i
) = copy_asm_operands_vector
;
3574 else if (XVEC (orig
, i
) != NULL
)
3576 XVEC (copy
, i
) = rtvec_alloc (XVECLEN (orig
, i
));
3577 for (j
= 0; j
< XVECLEN (copy
, i
); j
++)
3578 XVECEXP (copy
, i
, j
) = copy_insn_1 (XVECEXP (orig
, i
, j
));
3584 bitmap new_bits
= BITMAP_OBSTACK_ALLOC (rtl_obstack
);
3585 bitmap_copy (new_bits
, XBITMAP (orig
, i
));
3586 XBITMAP (copy
, i
) = new_bits
;
3597 /* These are left unchanged. */
3605 if (code
== SCRATCH
)
3607 i
= copy_insn_n_scratches
++;
3608 if (i
>= MAX_RECOG_OPERANDS
)
3610 copy_insn_scratch_in
[i
] = orig
;
3611 copy_insn_scratch_out
[i
] = copy
;
3613 else if (code
== ASM_OPERANDS
)
3615 orig_asm_operands_vector
= XVEC (orig
, 3);
3616 copy_asm_operands_vector
= XVEC (copy
, 3);
3617 orig_asm_constraints_vector
= XVEC (orig
, 4);
3618 copy_asm_constraints_vector
= XVEC (copy
, 4);
3624 /* Create a new copy of an rtx.
3625 This function differs from copy_rtx in that it handles SCRATCHes and
3626 ASM_OPERANDs properly.
3627 INSN doesn't really have to be a full INSN; it could be just the
3633 copy_insn_n_scratches
= 0;
3634 orig_asm_operands_vector
= 0;
3635 orig_asm_constraints_vector
= 0;
3636 copy_asm_operands_vector
= 0;
3637 copy_asm_constraints_vector
= 0;
3638 return copy_insn_1 (insn
);
3641 /* Initialize data structures and variables in this file
3642 before generating rtl for each function. */
3647 struct function
*f
= current_function
;
3649 f
->emit
= (struct emit_status
*) xmalloc (sizeof (struct emit_status
));
3652 seq_rtl_expr
= NULL
;
3654 reg_rtx_no
= LAST_VIRTUAL_REGISTER
+ 1;
3657 first_label_num
= label_num
;
3661 clear_emit_caches ();
3663 /* Init the tables that describe all the pseudo regs. */
3665 f
->emit
->regno_pointer_flag_length
= LAST_VIRTUAL_REGISTER
+ 101;
3667 f
->emit
->regno_pointer_flag
3668 = (char *) xcalloc (f
->emit
->regno_pointer_flag_length
, sizeof (char));
3670 f
->emit
->regno_pointer_align
3671 = (char *) xcalloc (f
->emit
->regno_pointer_flag_length
,
3675 = (rtx
*) xcalloc (f
->emit
->regno_pointer_flag_length
* sizeof (rtx
),
3678 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
3679 init_virtual_regs (f
->emit
);
3681 /* Indicate that the virtual registers and stack locations are
3683 REGNO_POINTER_FLAG (STACK_POINTER_REGNUM
) = 1;
3684 REGNO_POINTER_FLAG (FRAME_POINTER_REGNUM
) = 1;
3685 REGNO_POINTER_FLAG (HARD_FRAME_POINTER_REGNUM
) = 1;
3686 REGNO_POINTER_FLAG (ARG_POINTER_REGNUM
) = 1;
3688 REGNO_POINTER_FLAG (VIRTUAL_INCOMING_ARGS_REGNUM
) = 1;
3689 REGNO_POINTER_FLAG (VIRTUAL_STACK_VARS_REGNUM
) = 1;
3690 REGNO_POINTER_FLAG (VIRTUAL_STACK_DYNAMIC_REGNUM
) = 1;
3691 REGNO_POINTER_FLAG (VIRTUAL_OUTGOING_ARGS_REGNUM
) = 1;
3692 REGNO_POINTER_FLAG (VIRTUAL_CFA_REGNUM
) = 1;
3694 #ifdef STACK_BOUNDARY
3695 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM
) = STACK_BOUNDARY
/ BITS_PER_UNIT
;
3696 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM
) = STACK_BOUNDARY
/ BITS_PER_UNIT
;
3697 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM
)
3698 = STACK_BOUNDARY
/ BITS_PER_UNIT
;
3699 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM
) = STACK_BOUNDARY
/ BITS_PER_UNIT
;
3701 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM
)
3702 = STACK_BOUNDARY
/ BITS_PER_UNIT
;
3703 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM
)
3704 = STACK_BOUNDARY
/ BITS_PER_UNIT
;
3705 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM
)
3706 = STACK_BOUNDARY
/ BITS_PER_UNIT
;
3707 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM
)
3708 = STACK_BOUNDARY
/ BITS_PER_UNIT
;
3709 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM
) = UNITS_PER_WORD
;
3712 #ifdef INIT_EXPANDERS
3717 /* Mark SS for GC. */
3720 mark_sequence_stack (ss
)
3721 struct sequence_stack
*ss
;
3725 ggc_mark_rtx (ss
->first
);
3726 ggc_mark_tree (ss
->sequence_rtl_expr
);
3731 /* Mark ES for GC. */
3734 mark_emit_status (es
)
3735 struct emit_status
*es
;
3743 for (i
= es
->regno_pointer_flag_length
, r
= es
->x_regno_reg_rtx
;
3747 mark_sequence_stack (es
->sequence_stack
);
3748 ggc_mark_tree (es
->sequence_rtl_expr
);
3749 ggc_mark_rtx (es
->x_first_insn
);
3752 /* Create some permanent unique rtl objects shared between all functions.
3753 LINE_NUMBERS is nonzero if line numbers are to be generated. */
3756 init_emit_once (line_numbers
)
3760 enum machine_mode mode
;
3761 enum machine_mode double_mode
;
3763 no_line_numbers
= ! line_numbers
;
3765 /* Compute the word and byte modes. */
3767 byte_mode
= VOIDmode
;
3768 word_mode
= VOIDmode
;
3769 double_mode
= VOIDmode
;
3771 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
); mode
!= VOIDmode
;
3772 mode
= GET_MODE_WIDER_MODE (mode
))
3774 if (GET_MODE_BITSIZE (mode
) == BITS_PER_UNIT
3775 && byte_mode
== VOIDmode
)
3778 if (GET_MODE_BITSIZE (mode
) == BITS_PER_WORD
3779 && word_mode
== VOIDmode
)
3783 #ifndef DOUBLE_TYPE_SIZE
3784 #define DOUBLE_TYPE_SIZE (BITS_PER_WORD * 2)
3787 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_FLOAT
); mode
!= VOIDmode
;
3788 mode
= GET_MODE_WIDER_MODE (mode
))
3790 if (GET_MODE_BITSIZE (mode
) == DOUBLE_TYPE_SIZE
3791 && double_mode
== VOIDmode
)
3795 ptr_mode
= mode_for_size (POINTER_SIZE
, GET_MODE_CLASS (Pmode
), 0);
3797 /* Assign register numbers to the globally defined register rtx.
3798 This must be done at runtime because the register number field
3799 is in a union and some compilers can't initialize unions. */
3801 pc_rtx
= gen_rtx (PC
, VOIDmode
);
3802 cc0_rtx
= gen_rtx (CC0
, VOIDmode
);
3803 stack_pointer_rtx
= gen_rtx_raw_REG (Pmode
, STACK_POINTER_REGNUM
);
3804 frame_pointer_rtx
= gen_rtx_raw_REG (Pmode
, FRAME_POINTER_REGNUM
);
3805 if (hard_frame_pointer_rtx
== 0)
3806 hard_frame_pointer_rtx
= gen_rtx_raw_REG (Pmode
,
3807 HARD_FRAME_POINTER_REGNUM
);
3808 if (arg_pointer_rtx
== 0)
3809 arg_pointer_rtx
= gen_rtx_raw_REG (Pmode
, ARG_POINTER_REGNUM
);
3810 virtual_incoming_args_rtx
=
3811 gen_rtx_raw_REG (Pmode
, VIRTUAL_INCOMING_ARGS_REGNUM
);
3812 virtual_stack_vars_rtx
=
3813 gen_rtx_raw_REG (Pmode
, VIRTUAL_STACK_VARS_REGNUM
);
3814 virtual_stack_dynamic_rtx
=
3815 gen_rtx_raw_REG (Pmode
, VIRTUAL_STACK_DYNAMIC_REGNUM
);
3816 virtual_outgoing_args_rtx
=
3817 gen_rtx_raw_REG (Pmode
, VIRTUAL_OUTGOING_ARGS_REGNUM
);
3818 virtual_cfa_rtx
= gen_rtx_raw_REG (Pmode
, VIRTUAL_CFA_REGNUM
);
3820 /* These rtx must be roots if GC is enabled. */
3822 ggc_add_rtx_root (global_rtl
, GR_MAX
);
3824 #ifdef INIT_EXPANDERS
3825 /* This is to initialize save_machine_status and restore_machine_status before
3826 the first call to push_function_context_to. This is needed by the Chill
3827 front end which calls push_function_context_to before the first call to
3828 init_function_start. */
3832 /* Create the unique rtx's for certain rtx codes and operand values. */
3834 /* Don't use gen_rtx here since gen_rtx in this case
3835 tries to use these variables. */
3836 for (i
= - MAX_SAVED_CONST_INT
; i
<= MAX_SAVED_CONST_INT
; i
++)
3837 const_int_rtx
[i
+ MAX_SAVED_CONST_INT
] =
3838 gen_rtx_raw_CONST_INT (VOIDmode
, i
);
3840 ggc_add_rtx_root (const_int_rtx
, 2 * MAX_SAVED_CONST_INT
+ 1);
3842 if (STORE_FLAG_VALUE
>= - MAX_SAVED_CONST_INT
3843 && STORE_FLAG_VALUE
<= MAX_SAVED_CONST_INT
)
3844 const_true_rtx
= const_int_rtx
[STORE_FLAG_VALUE
+ MAX_SAVED_CONST_INT
];
3846 const_true_rtx
= gen_rtx_CONST_INT (VOIDmode
, STORE_FLAG_VALUE
);
3848 dconst0
= REAL_VALUE_ATOF ("0", double_mode
);
3849 dconst1
= REAL_VALUE_ATOF ("1", double_mode
);
3850 dconst2
= REAL_VALUE_ATOF ("2", double_mode
);
3851 dconstm1
= REAL_VALUE_ATOF ("-1", double_mode
);
3853 for (i
= 0; i
<= 2; i
++)
3855 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_FLOAT
); mode
!= VOIDmode
;
3856 mode
= GET_MODE_WIDER_MODE (mode
))
3858 rtx tem
= rtx_alloc (CONST_DOUBLE
);
3859 union real_extract u
;
3861 bzero ((char *) &u
, sizeof u
); /* Zero any holes in a structure. */
3862 u
.d
= i
== 0 ? dconst0
: i
== 1 ? dconst1
: dconst2
;
3864 bcopy ((char *) &u
, (char *) &CONST_DOUBLE_LOW (tem
), sizeof u
);
3865 CONST_DOUBLE_MEM (tem
) = cc0_rtx
;
3866 PUT_MODE (tem
, mode
);
3868 const_tiny_rtx
[i
][(int) mode
] = tem
;
3871 const_tiny_rtx
[i
][(int) VOIDmode
] = GEN_INT (i
);
3873 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
); mode
!= VOIDmode
;
3874 mode
= GET_MODE_WIDER_MODE (mode
))
3875 const_tiny_rtx
[i
][(int) mode
] = GEN_INT (i
);
3877 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT
);
3879 mode
= GET_MODE_WIDER_MODE (mode
))
3880 const_tiny_rtx
[i
][(int) mode
] = GEN_INT (i
);
3883 for (mode
= CCmode
; mode
< MAX_MACHINE_MODE
; ++mode
)
3884 if (GET_MODE_CLASS (mode
) == MODE_CC
)
3885 const_tiny_rtx
[0][(int) mode
] = const0_rtx
;
3887 ggc_add_rtx_root (&const_tiny_rtx
[0][0], sizeof(const_tiny_rtx
)/sizeof(rtx
));
3888 ggc_add_rtx_root (&const_true_rtx
, 1);
3890 #ifdef RETURN_ADDRESS_POINTER_REGNUM
3891 return_address_pointer_rtx
3892 = gen_rtx_raw_REG (Pmode
, RETURN_ADDRESS_POINTER_REGNUM
);
3896 struct_value_rtx
= STRUCT_VALUE
;
3898 struct_value_rtx
= gen_rtx_REG (Pmode
, STRUCT_VALUE_REGNUM
);
3901 #ifdef STRUCT_VALUE_INCOMING
3902 struct_value_incoming_rtx
= STRUCT_VALUE_INCOMING
;
3904 #ifdef STRUCT_VALUE_INCOMING_REGNUM
3905 struct_value_incoming_rtx
3906 = gen_rtx_REG (Pmode
, STRUCT_VALUE_INCOMING_REGNUM
);
3908 struct_value_incoming_rtx
= struct_value_rtx
;
3912 #ifdef STATIC_CHAIN_REGNUM
3913 static_chain_rtx
= gen_rtx_REG (Pmode
, STATIC_CHAIN_REGNUM
);
3915 #ifdef STATIC_CHAIN_INCOMING_REGNUM
3916 if (STATIC_CHAIN_INCOMING_REGNUM
!= STATIC_CHAIN_REGNUM
)
3917 static_chain_incoming_rtx
3918 = gen_rtx_REG (Pmode
, STATIC_CHAIN_INCOMING_REGNUM
);
3921 static_chain_incoming_rtx
= static_chain_rtx
;
3925 static_chain_rtx
= STATIC_CHAIN
;
3927 #ifdef STATIC_CHAIN_INCOMING
3928 static_chain_incoming_rtx
= STATIC_CHAIN_INCOMING
;
3930 static_chain_incoming_rtx
= static_chain_rtx
;
3934 #ifdef PIC_OFFSET_TABLE_REGNUM
3935 pic_offset_table_rtx
= gen_rtx_REG (Pmode
, PIC_OFFSET_TABLE_REGNUM
);
3938 ggc_add_rtx_root (&pic_offset_table_rtx
, 1);
3939 ggc_add_rtx_root (&struct_value_rtx
, 1);
3940 ggc_add_rtx_root (&struct_value_incoming_rtx
, 1);
3941 ggc_add_rtx_root (&static_chain_rtx
, 1);
3942 ggc_add_rtx_root (&static_chain_incoming_rtx
, 1);
3943 ggc_add_rtx_root (&return_address_pointer_rtx
, 1);
3946 /* Query and clear/ restore no_line_numbers. This is used by the
3947 switch / case handling in stmt.c to give proper line numbers in
3948 warnings about unreachable code. */
3951 force_line_numbers ()
3953 int old
= no_line_numbers
;
3955 no_line_numbers
= 0;
3957 force_next_line_note ();
3962 restore_line_number_status (old_value
)
3965 no_line_numbers
= old_value
;