/* Emit RTL for the GNU C-Compiler expander.
   Copyright (C) 1987, 88, 92-97, 1998, 1999 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
/* Middle-to-low level generation of rtx code and insns.

   This file contains the functions `gen_rtx', `gen_reg_rtx'
   and `gen_label_rtx' that are the usual ways of creating rtl
   expressions for most purposes.

   It also has the functions for creating insns and linking
   them in the doubly-linked chain.

   The patterns of the insns are created by machine-dependent
   routines in insn-emit.c, which is generated automatically from
   the machine description.  These routines use `gen_rtx' to make
   the individual rtx's of the pattern; what is machine dependent
   is the kind of rtx's they make and what arguments they use.  */
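/* Illustrative sketch (not part of the original sources): a typical
   consumer of these routines builds a pattern out of freshly created
   rtl and hands it to the emit functions below, e.g.

     rtx tmp = gen_reg_rtx (SImode);
     emit_insn (gen_rtx_SET (VOIDmode, tmp,
                             gen_rtx_PLUS (SImode, a, b)));

   where `a' and `b' are assumed to be existing SImode rtx operands.  */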
#include "hard-reg-set.h"
#include "insn-config.h"
/* Commonly used modes.  */

enum machine_mode byte_mode;	/* Mode whose width is BITS_PER_UNIT.  */
enum machine_mode word_mode;	/* Mode whose width is BITS_PER_WORD.  */
enum machine_mode double_mode;	/* Mode whose width is DOUBLE_TYPE_SIZE.  */
enum machine_mode ptr_mode;	/* Mode whose width is POINTER_SIZE.  */
/* This is *not* reset after each function.  It gives each CODE_LABEL
   in the entire compilation a unique label number.  */

static int label_num = 1;

/* Highest label number in current function.
   Zero means use the value of label_num instead.
   This is nonzero only when belatedly compiling an inline function.  */

static int last_label_num;

/* Value label_num had when set_new_first_and_last_label_number was called.
   If label_num has not changed since then, last_label_num is valid.  */

static int base_label_num;

/* Nonzero means do not generate NOTEs for source line numbers.  */

static int no_line_numbers;
/* Commonly used rtx's, so that we only need space for one copy.
   These are initialized once for the entire compilation.
   All of these except perhaps the floating-point CONST_DOUBLEs
   are unique; no other rtx-object will be equal to any of these.  */

rtx global_rtl[GR_MAX];

/* We record floating-point CONST_DOUBLEs in each floating-point mode for
   the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
   record a copy of const[012]_rtx.  */

rtx const_tiny_rtx[3][(int) MAX_MACHINE_MODE];
REAL_VALUE_TYPE dconst0;
REAL_VALUE_TYPE dconst1;
REAL_VALUE_TYPE dconst2;
REAL_VALUE_TYPE dconstm1;
/* All references to the following fixed hard registers go through
   these unique rtl objects.  On machines where the frame-pointer and
   arg-pointer are the same register, they use the same unique object.

   After register allocation, other rtl objects which used to be pseudo-regs
   may be clobbered to refer to the frame-pointer register.
   But references that were originally to the frame-pointer can be
   distinguished from the others because they contain frame_pointer_rtx.

   When to use frame_pointer_rtx and hard_frame_pointer_rtx is a little
   tricky: until register elimination has taken place hard_frame_pointer_rtx
   should be used if it is being set, and frame_pointer_rtx otherwise.  After
   register elimination hard_frame_pointer_rtx should always be used.
   On machines where the two registers are the same (most machines), these
   are the same rtx.

   In an inline procedure, the stack and frame pointer rtxs may not be
   used for anything else.  */
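/* Illustrative sketch (an assumption, not taken from any particular port):
   before register elimination a backend that addresses a local slot would
   typically express the reference in terms of frame_pointer_rtx, e.g.

     rtx slot = gen_rtx_MEM (SImode,
                             plus_constant (frame_pointer_rtx, -8));

   and only references that *set* the register would use
   hard_frame_pointer_rtx; after elimination, hard_frame_pointer_rtx is
   used throughout.  The -8 offset is purely illustrative.  */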
rtx struct_value_rtx;		/* (REG:Pmode STRUCT_VALUE_REGNUM) */
rtx struct_value_incoming_rtx;	/* (REG:Pmode STRUCT_VALUE_INCOMING_REGNUM) */
rtx static_chain_rtx;		/* (REG:Pmode STATIC_CHAIN_REGNUM) */
rtx static_chain_incoming_rtx;	/* (REG:Pmode STATIC_CHAIN_INCOMING_REGNUM) */
rtx pic_offset_table_rtx;	/* (REG:Pmode PIC_OFFSET_TABLE_REGNUM) */

/* This is used to implement __builtin_return_address for some machines.
   See for instance the MIPS port.  */
rtx return_address_pointer_rtx;	/* (REG:Pmode RETURN_ADDRESS_POINTER_REGNUM) */
/* We make one copy of (const_int C) where C is in
   [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
   to save space during the compilation and simplify comparisons of
   integers.  */

rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
/* start_sequence and gen_sequence can make a lot of rtx expressions which are
   shortly thrown away.  We use two mechanisms to prevent this waste:

   For sizes up to 5 elements, we keep a SEQUENCE and its associated
   rtvec for use by gen_sequence.  One entry for each size is
   sufficient because most cases are calls to gen_sequence followed by
   immediately emitting the SEQUENCE.  Reuse is safe since emitting a
   sequence is destructive on the insn in it anyway and hence can't be
   redone.

   We do not bother to save this cached data over nested function calls.
   Instead, we just reinitialize them.  */

#define SEQUENCE_RESULT_SIZE 5

static rtx sequence_result[SEQUENCE_RESULT_SIZE];
/* During RTL generation, we also keep a list of free INSN rtl codes.  */
static rtx free_insn;

#define first_insn (current_function->emit->x_first_insn)
#define last_insn (current_function->emit->x_last_insn)
#define cur_insn_uid (current_function->emit->x_cur_insn_uid)
#define last_linenum (current_function->emit->x_last_linenum)
#define last_filename (current_function->emit->x_last_filename)
#define first_label_num (current_function->emit->x_first_label_num)
static rtx make_jump_insn_raw		PROTO((rtx));
static rtx make_call_insn_raw		PROTO((rtx));
static rtx find_line_note		PROTO((rtx));
static void mark_sequence_stack		PROTO((struct sequence_stack *));
/* There are some RTL codes that require special attention; the generation
   functions do the raw handling.  If you add to this list, modify
   special_rtx in gengenrtl.c as well.  */
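/* Illustrative sketch (not part of the original sources): because small
   constants are cached, two requests for the same value return the very
   same rtx object, so pointer comparison suffices.  For example,

     rtx a = gen_rtx_CONST_INT (VOIDmode, 5);
     rtx b = GEN_INT (5);

   leaves a == b; both point at const_int_rtx[5 + MAX_SAVED_CONST_INT].  */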
rtx
gen_rtx_CONST_INT (mode, arg)
     enum machine_mode mode;
     HOST_WIDE_INT arg;
{
  if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
    return const_int_rtx[arg + MAX_SAVED_CONST_INT];

#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
  if (const_true_rtx && arg == STORE_FLAG_VALUE)
    return const_true_rtx;
#endif

  return gen_rtx_raw_CONST_INT (mode, arg);
}
/* CONST_DOUBLEs need special handling because their length is known
   only at run-time.  */
193 gen_rtx_CONST_DOUBLE (mode
, arg0
, arg1
, arg2
)
194 enum machine_mode mode
;
196 HOST_WIDE_INT arg1
, arg2
;
198 rtx r
= rtx_alloc (CONST_DOUBLE
);
203 X0EXP (r
, 1) = NULL_RTX
;
207 for (i
= GET_RTX_LENGTH (CONST_DOUBLE
) - 1; i
> 3; --i
)
rtx
gen_rtx_REG (mode, regno)
     enum machine_mode mode;
     int regno;
{
  /* In case the MD file explicitly references the frame pointer, have
     all such references point to the same frame pointer.  This is
     used during frame pointer elimination to distinguish the explicit
     references to these registers from pseudos that happened to be
     assigned to them.

     If we have eliminated the frame pointer or arg pointer, we will
     be using it as a normal register, for example as a spill
     register.  In such cases, we might be accessing it in a mode that
     is not Pmode and therefore cannot use the pre-allocated rtx.

     Also don't do this when we are making new REGs in reload, since
     we don't want to get confused with the real pointers.  */

  if (mode == Pmode && !reload_in_progress)
    {
      if (regno == FRAME_POINTER_REGNUM)
	return frame_pointer_rtx;
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
      if (regno == HARD_FRAME_POINTER_REGNUM)
	return hard_frame_pointer_rtx;
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && HARD_FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
      if (regno == ARG_POINTER_REGNUM)
	return arg_pointer_rtx;
#endif
#ifdef RETURN_ADDRESS_POINTER_REGNUM
      if (regno == RETURN_ADDRESS_POINTER_REGNUM)
	return return_address_pointer_rtx;
#endif
      if (regno == STACK_POINTER_REGNUM)
	return stack_pointer_rtx;
    }

  return gen_rtx_raw_REG (mode, regno);
}
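/* Illustrative sketch (not part of the original sources): asking for the
   frame pointer in Pmode outside of reload returns the shared object, e.g.

     rtx fp = gen_rtx_REG (Pmode, FRAME_POINTER_REGNUM);

   leaves fp == frame_pointer_rtx, whereas any other (mode, regno) pair
   simply produces a fresh REG via gen_rtx_raw_REG.  */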
rtx
gen_rtx_MEM (mode, addr)
     enum machine_mode mode;
     rtx addr;
{
  rtx rt = gen_rtx_raw_MEM (mode, addr);

  /* This field is not cleared by the mere allocation of the rtx, so
     we clear it.  */
  MEM_ALIAS_SET (rt) = 0;

  return rt;
}
/* rtx gen_rtx (code, mode, [element1, ..., elementn])
**
**	    This routine generates an RTX of the size specified by
**	<code>, which is an RTX code.  The RTX structure is initialized
**	from the arguments <element1> through <elementn>, which are
**	interpreted according to the specific RTX type's format.  The
**	special machine mode associated with the rtx (if any) is specified
**	in <mode>.
**
**	    gen_rtx can be invoked in a way which resembles the lisp-like
**	rtx it will generate.  For example, the following rtx structure:
**
**	      (plus:QI (mem:QI (reg:SI 1))
**		       (mem:QI (plus:SI (reg:SI 2) (reg:SI 3))))
**
**	...would be generated by the following C code:
**
**		gen_rtx (PLUS, QImode,
**		    gen_rtx (MEM, QImode,
**			gen_rtx (REG, SImode, 1)),
**		    gen_rtx (MEM, QImode,
**			gen_rtx (PLUS, SImode,
**			    gen_rtx (REG, SImode, 2),
**			    gen_rtx (REG, SImode, 3)))),
*/
297 gen_rtx
VPROTO((enum rtx_code code
, enum machine_mode mode
, ...))
299 #ifndef ANSI_PROTOTYPES
301 enum machine_mode mode
;
304 register int i
; /* Array indices... */
305 register const char *fmt
; /* Current rtx's format... */
306 register rtx rt_val
; /* RTX to return to caller... */
310 #ifndef ANSI_PROTOTYPES
311 code
= va_arg (p
, enum rtx_code
);
312 mode
= va_arg (p
, enum machine_mode
);
318 rt_val
= gen_rtx_CONST_INT (mode
, va_arg (p
, HOST_WIDE_INT
));
323 rtx arg0
= va_arg (p
, rtx
);
324 HOST_WIDE_INT arg1
= va_arg (p
, HOST_WIDE_INT
);
325 HOST_WIDE_INT arg2
= va_arg (p
, HOST_WIDE_INT
);
326 rt_val
= gen_rtx_CONST_DOUBLE (mode
, arg0
, arg1
, arg2
);
331 rt_val
= gen_rtx_REG (mode
, va_arg (p
, int));
335 rt_val
= gen_rtx_MEM (mode
, va_arg (p
, rtx
));
339 rt_val
= rtx_alloc (code
); /* Allocate the storage space. */
340 rt_val
->mode
= mode
; /* Store the machine mode... */
342 fmt
= GET_RTX_FORMAT (code
); /* Find the right format... */
343 for (i
= 0; i
< GET_RTX_LENGTH (code
); i
++)
347 case '0': /* Unused field. */
350 case 'i': /* An integer? */
351 XINT (rt_val
, i
) = va_arg (p
, int);
354 case 'w': /* A wide integer? */
355 XWINT (rt_val
, i
) = va_arg (p
, HOST_WIDE_INT
);
358 case 's': /* A string? */
359 XSTR (rt_val
, i
) = va_arg (p
, char *);
362 case 'e': /* An expression? */
363 case 'u': /* An insn? Same except when printing. */
364 XEXP (rt_val
, i
) = va_arg (p
, rtx
);
367 case 'E': /* An RTX vector? */
368 XVEC (rt_val
, i
) = va_arg (p
, rtvec
);
371 case 'b': /* A bitmap? */
372 XBITMAP (rt_val
, i
) = va_arg (p
, bitmap
);
375 case 't': /* A tree? */
376 XTREE (rt_val
, i
) = va_arg (p
, tree
);
/* gen_rtvec (n, [rt1, ..., rtn])
**
**	    This routine creates an rtvec and stores within it the
**	pointers to rtx's which are its arguments.
*/
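/* Illustrative sketch (not part of the original sources): packing two
   existing rtx operands into a vector, e.g. for a PARALLEL:

     rtvec v = gen_rtvec (2, op0, op1);
     rtx par = gen_rtx_PARALLEL (VOIDmode, v);

   where op0 and op1 are assumed to be rtx values already in hand.  */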
398 gen_rtvec
VPROTO((int n
, ...))
400 #ifndef ANSI_PROTOTYPES
409 #ifndef ANSI_PROTOTYPES
414 return NULL_RTVEC
; /* Don't allocate an empty rtvec... */
416 vector
= (rtx
*) alloca (n
* sizeof (rtx
));
418 for (i
= 0; i
< n
; i
++)
419 vector
[i
] = va_arg (p
, rtx
);
422 return gen_rtvec_v (n
, vector
);
426 gen_rtvec_v (n
, argp
)
431 register rtvec rt_val
;
434 return NULL_RTVEC
; /* Don't allocate an empty rtvec... */
436 rt_val
= rtvec_alloc (n
); /* Allocate an rtvec... */
438 for (i
= 0; i
< n
; i
++)
439 rt_val
->elem
[i
] = *argp
++;
/* Generate a REG rtx for a new pseudo register of mode MODE.
   This pseudo is assigned the next sequential register number.  */
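/* Illustrative sketch (not part of the original sources): expanders
   typically grab scratch pseudos like so:

     rtx tmp = gen_reg_rtx (SImode);
     emit_move_insn (tmp, operands[1]);

   where operands[] is assumed to come from an insn expander.  */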
450 enum machine_mode mode
;
452 struct function
*f
= current_function
;
455 /* Don't let anything called after initial flow analysis create new
460 if (GET_MODE_CLASS (mode
) == MODE_COMPLEX_FLOAT
461 || GET_MODE_CLASS (mode
) == MODE_COMPLEX_INT
)
      /* For complex modes, don't make a single pseudo.
	 Instead, make a CONCAT of two pseudos.
	 This allows noncontiguous allocation of the real and imaginary parts,
	 which makes much better code.  Besides, allocating DCmode
	 pseudos overstrains reload on some machines like the 386.  */
468 rtx realpart
, imagpart
;
469 int size
= GET_MODE_UNIT_SIZE (mode
);
470 enum machine_mode partmode
471 = mode_for_size (size
* BITS_PER_UNIT
,
472 (GET_MODE_CLASS (mode
) == MODE_COMPLEX_FLOAT
473 ? MODE_FLOAT
: MODE_INT
),
476 realpart
= gen_reg_rtx (partmode
);
477 imagpart
= gen_reg_rtx (partmode
);
478 return gen_rtx_CONCAT (mode
, realpart
, imagpart
);
481 /* Make sure regno_pointer_flag and regno_reg_rtx are large
482 enough to have an element for this pseudo reg number. */
484 if (reg_rtx_no
== f
->emit
->regno_pointer_flag_length
)
486 int old_size
= f
->emit
->regno_pointer_flag_length
;
489 new = xrealloc (f
->emit
->regno_pointer_flag
, old_size
* 2);
490 memset (new + old_size
, 0, old_size
);
491 f
->emit
->regno_pointer_flag
= new;
493 new = xrealloc (f
->emit
->regno_pointer_align
, old_size
* 2);
494 memset (new + old_size
, 0, old_size
);
495 f
->emit
->regno_pointer_align
= new;
497 new1
= (rtx
*) xrealloc (f
->emit
->x_regno_reg_rtx
,
498 old_size
* 2 * sizeof (rtx
));
499 memset (new1
+ old_size
, 0, old_size
* sizeof (rtx
));
500 regno_reg_rtx
= new1
;
502 f
->emit
->regno_pointer_flag_length
= old_size
* 2;
505 val
= gen_rtx_raw_REG (mode
, reg_rtx_no
);
506 regno_reg_rtx
[reg_rtx_no
++] = val
;
510 /* Identify REG (which may be a CONCAT) as a user register. */
516 if (GET_CODE (reg
) == CONCAT
)
518 REG_USERVAR_P (XEXP (reg
, 0)) = 1;
519 REG_USERVAR_P (XEXP (reg
, 1)) = 1;
521 else if (GET_CODE (reg
) == REG
)
522 REG_USERVAR_P (reg
) = 1;
/* Identify REG as a probable pointer register and show its alignment
   as ALIGN, if nonzero.  */
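/* Illustrative sketch (not part of the original sources): after loading
   an address into a fresh pseudo one would typically note that fact:

     rtx addr = gen_reg_rtx (Pmode);
     emit_move_insn (addr, XEXP (mem, 0));
     mark_reg_pointer (addr, GET_MODE_ALIGNMENT (GET_MODE (mem)));

   where `mem' is assumed to be an existing MEM rtx.  */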
531 mark_reg_pointer (reg
, align
)
535 if (! REGNO_POINTER_FLAG (REGNO (reg
)))
537 REGNO_POINTER_FLAG (REGNO (reg
)) = 1;
540 REGNO_POINTER_ALIGN (REGNO (reg
)) = align
;
  else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
    /* We can no longer be sure just how aligned this pointer is.  */
    REGNO_POINTER_ALIGN (REGNO (reg)) = align;
547 /* Return 1 plus largest pseudo reg number used in the current function. */
555 /* Return 1 + the largest label number used so far in the current function. */
560 if (last_label_num
&& label_num
== base_label_num
)
561 return last_label_num
;
565 /* Return first label number used in this function (if any were used). */
568 get_first_label_num ()
570 return first_label_num
;
/* Return a value representing some low-order bits of X, where the number
   of low-order bits is given by MODE.  Note that no conversion is done
   between floating-point and fixed-point values, rather, the bit
   representation is returned.

   This function handles the cases in common between gen_lowpart, below,
   and two variants in cse.c and combine.c.  These are the cases that can
   be safely handled at all points in the compilation.

   If this is not a case we can handle, return 0.  */
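/* Illustrative sketch (not part of the original sources): extracting the
   low SImode part of an integer constant folds immediately, e.g.

     rtx lo = gen_lowpart_common (SImode, GEN_INT (0x12345678));

   returns a CONST_INT, while an unhandled combination simply yields 0
   and the caller must fall back to gen_lowpart or an explicit copy.  */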
585 gen_lowpart_common (mode
, x
)
586 enum machine_mode mode
;
591 if (GET_MODE (x
) == mode
)
594 /* MODE must occupy no more words than the mode of X. */
595 if (GET_MODE (x
) != VOIDmode
596 && ((GET_MODE_SIZE (mode
) + (UNITS_PER_WORD
- 1)) / UNITS_PER_WORD
597 > ((GET_MODE_SIZE (GET_MODE (x
)) + (UNITS_PER_WORD
- 1))
601 if (WORDS_BIG_ENDIAN
&& GET_MODE_SIZE (GET_MODE (x
)) > UNITS_PER_WORD
)
602 word
= ((GET_MODE_SIZE (GET_MODE (x
))
603 - MAX (GET_MODE_SIZE (mode
), UNITS_PER_WORD
))
606 if ((GET_CODE (x
) == ZERO_EXTEND
|| GET_CODE (x
) == SIGN_EXTEND
)
607 && (GET_MODE_CLASS (mode
) == MODE_INT
608 || GET_MODE_CLASS (mode
) == MODE_PARTIAL_INT
))
      /* If we are getting the low-order part of something that has been
	 sign- or zero-extended, we can either just use the object being
	 extended or make a narrower extension.  If we want an even smaller
	 piece than the size of the object being extended, call ourselves
	 recursively.

	 This case is used mostly by combine and cse.  */
618 if (GET_MODE (XEXP (x
, 0)) == mode
)
620 else if (GET_MODE_SIZE (mode
) < GET_MODE_SIZE (GET_MODE (XEXP (x
, 0))))
621 return gen_lowpart_common (mode
, XEXP (x
, 0));
622 else if (GET_MODE_SIZE (mode
) < GET_MODE_SIZE (GET_MODE (x
)))
623 return gen_rtx_fmt_e (GET_CODE (x
), mode
, XEXP (x
, 0));
625 else if (GET_CODE (x
) == SUBREG
626 && (GET_MODE_SIZE (mode
) <= UNITS_PER_WORD
627 || GET_MODE_SIZE (mode
) == GET_MODE_UNIT_SIZE (GET_MODE (x
))))
628 return (GET_MODE (SUBREG_REG (x
)) == mode
&& SUBREG_WORD (x
) == 0
630 : gen_rtx_SUBREG (mode
, SUBREG_REG (x
), SUBREG_WORD (x
) + word
));
631 else if (GET_CODE (x
) == REG
)
      /* Let the backend decide how many registers to skip.  This is needed
	 in particular for Sparc64 where fp regs are smaller than a word.  */
      /* ??? Note that subregs are now ambiguous, in that those against
	 pseudos are sized by the Word Size, while those against hard
	 regs are sized by the underlying register size.  Better would be
	 to always interpret the subreg offset parameter as bytes or bits.  */
640 if (WORDS_BIG_ENDIAN
&& REGNO (x
) < FIRST_PSEUDO_REGISTER
)
641 word
= (HARD_REGNO_NREGS (REGNO (x
), GET_MODE (x
))
642 - HARD_REGNO_NREGS (REGNO (x
), mode
));
644 /* If the register is not valid for MODE, return 0. If we don't
645 do this, there is no way to fix up the resulting REG later.
646 But we do do this if the current REG is not valid for its
647 mode. This latter is a kludge, but is required due to the
648 way that parameters are passed on some machines, most
650 if (REGNO (x
) < FIRST_PSEUDO_REGISTER
651 && ! HARD_REGNO_MODE_OK (REGNO (x
) + word
, mode
)
652 && HARD_REGNO_MODE_OK (REGNO (x
), GET_MODE (x
)))
654 else if (REGNO (x
) < FIRST_PSEUDO_REGISTER
655 /* integrate.c can't handle parts of a return value register. */
656 && (! REG_FUNCTION_VALUE_P (x
)
657 || ! rtx_equal_function_value_matters
)
658 #ifdef CLASS_CANNOT_CHANGE_SIZE
659 && ! (GET_MODE_SIZE (mode
) != GET_MODE_SIZE (GET_MODE (x
))
660 && GET_MODE_CLASS (GET_MODE (x
)) != MODE_COMPLEX_INT
661 && GET_MODE_CLASS (GET_MODE (x
)) != MODE_COMPLEX_FLOAT
662 && (TEST_HARD_REG_BIT
663 (reg_class_contents
[(int) CLASS_CANNOT_CHANGE_SIZE
],
	   /* We want to keep the stack, frame, and arg pointers
	      special.  */
668 && x
!= frame_pointer_rtx
669 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
670 && x
!= arg_pointer_rtx
672 && x
!= stack_pointer_rtx
)
673 return gen_rtx_REG (mode
, REGNO (x
) + word
);
675 return gen_rtx_SUBREG (mode
, x
, word
);
677 /* If X is a CONST_INT or a CONST_DOUBLE, extract the appropriate bits
678 from the low-order part of the constant. */
679 else if ((GET_MODE_CLASS (mode
) == MODE_INT
680 || GET_MODE_CLASS (mode
) == MODE_PARTIAL_INT
)
681 && GET_MODE (x
) == VOIDmode
682 && (GET_CODE (x
) == CONST_INT
|| GET_CODE (x
) == CONST_DOUBLE
))
      /* If MODE is twice the host word size, X is already the desired
	 representation.  Otherwise, if MODE is wider than a word, we can't
	 do this.  If MODE is exactly a word, return just one CONST_INT.
	 If MODE is smaller than a word, clear the bits that don't belong
	 in our mode, unless they and our sign bit are all one.  So we get
	 either a reasonable negative value or a reasonable unsigned value
	 for this mode.  */
692 if (GET_MODE_BITSIZE (mode
) >= 2 * HOST_BITS_PER_WIDE_INT
)
694 else if (GET_MODE_BITSIZE (mode
) > HOST_BITS_PER_WIDE_INT
)
696 else if (GET_MODE_BITSIZE (mode
) == HOST_BITS_PER_WIDE_INT
)
697 return (GET_CODE (x
) == CONST_INT
? x
698 : GEN_INT (CONST_DOUBLE_LOW (x
)));
701 /* MODE must be narrower than HOST_BITS_PER_WIDE_INT. */
702 int width
= GET_MODE_BITSIZE (mode
);
703 HOST_WIDE_INT val
= (GET_CODE (x
) == CONST_INT
? INTVAL (x
)
704 : CONST_DOUBLE_LOW (x
));
706 /* Sign extend to HOST_WIDE_INT. */
707 val
= val
<< (HOST_BITS_PER_WIDE_INT
- width
) >> (HOST_BITS_PER_WIDE_INT
- width
);
709 return (GET_CODE (x
) == CONST_INT
&& INTVAL (x
) == val
? x
  /* If X is an integral constant but we want it in floating-point, it
     must be the case that we have a union of an integer and a floating-point
     value.  If the machine-parameters allow it, simulate that union here
     and return the result.  The two-word and single-word cases are
720 else if (((HOST_FLOAT_FORMAT
== TARGET_FLOAT_FORMAT
721 && HOST_BITS_PER_WIDE_INT
== BITS_PER_WORD
)
722 || flag_pretend_float
)
723 && GET_MODE_CLASS (mode
) == MODE_FLOAT
724 && GET_MODE_SIZE (mode
) == UNITS_PER_WORD
725 && GET_CODE (x
) == CONST_INT
726 && sizeof (float) * HOST_BITS_PER_CHAR
== HOST_BITS_PER_WIDE_INT
)
727 #ifdef REAL_ARITHMETIC
733 r
= REAL_VALUE_FROM_TARGET_SINGLE (i
);
734 return CONST_DOUBLE_FROM_REAL_VALUE (r
, mode
);
738 union {HOST_WIDE_INT i
; float d
; } u
;
741 return CONST_DOUBLE_FROM_REAL_VALUE (u
.d
, mode
);
744 else if (((HOST_FLOAT_FORMAT
== TARGET_FLOAT_FORMAT
745 && HOST_BITS_PER_WIDE_INT
== BITS_PER_WORD
)
746 || flag_pretend_float
)
747 && GET_MODE_CLASS (mode
) == MODE_FLOAT
748 && GET_MODE_SIZE (mode
) == 2 * UNITS_PER_WORD
749 && (GET_CODE (x
) == CONST_INT
|| GET_CODE (x
) == CONST_DOUBLE
)
750 && GET_MODE (x
) == VOIDmode
751 && (sizeof (double) * HOST_BITS_PER_CHAR
752 == 2 * HOST_BITS_PER_WIDE_INT
))
753 #ifdef REAL_ARITHMETIC
757 HOST_WIDE_INT low
, high
;
759 if (GET_CODE (x
) == CONST_INT
)
760 low
= INTVAL (x
), high
= low
>> (HOST_BITS_PER_WIDE_INT
-1);
762 low
= CONST_DOUBLE_LOW (x
), high
= CONST_DOUBLE_HIGH (x
);
764 /* REAL_VALUE_TARGET_DOUBLE takes the addressing order of the
766 if (WORDS_BIG_ENDIAN
)
767 i
[0] = high
, i
[1] = low
;
769 i
[0] = low
, i
[1] = high
;
771 r
= REAL_VALUE_FROM_TARGET_DOUBLE (i
);
772 return CONST_DOUBLE_FROM_REAL_VALUE (r
, mode
);
776 union {HOST_WIDE_INT i
[2]; double d
; } u
;
777 HOST_WIDE_INT low
, high
;
779 if (GET_CODE (x
) == CONST_INT
)
780 low
= INTVAL (x
), high
= low
>> (HOST_BITS_PER_WIDE_INT
-1);
782 low
= CONST_DOUBLE_LOW (x
), high
= CONST_DOUBLE_HIGH (x
);
784 #ifdef HOST_WORDS_BIG_ENDIAN
785 u
.i
[0] = high
, u
.i
[1] = low
;
787 u
.i
[0] = low
, u
.i
[1] = high
;
790 return CONST_DOUBLE_FROM_REAL_VALUE (u
.d
, mode
);
794 /* We need an extra case for machines where HOST_BITS_PER_WIDE_INT is the
795 same as sizeof (double) or when sizeof (float) is larger than the
796 size of a word on the target machine. */
797 #ifdef REAL_ARITHMETIC
798 else if (mode
== SFmode
&& GET_CODE (x
) == CONST_INT
)
804 r
= REAL_VALUE_FROM_TARGET_SINGLE (i
);
805 return CONST_DOUBLE_FROM_REAL_VALUE (r
, mode
);
807 else if (((HOST_FLOAT_FORMAT
== TARGET_FLOAT_FORMAT
808 && HOST_BITS_PER_WIDE_INT
== BITS_PER_WORD
)
809 || flag_pretend_float
)
810 && GET_MODE_CLASS (mode
) == MODE_FLOAT
811 && GET_MODE_SIZE (mode
) == UNITS_PER_WORD
812 && GET_CODE (x
) == CONST_INT
813 && (sizeof (double) * HOST_BITS_PER_CHAR
814 == HOST_BITS_PER_WIDE_INT
))
820 r
= REAL_VALUE_FROM_TARGET_DOUBLE (&i
);
821 return CONST_DOUBLE_FROM_REAL_VALUE (r
, mode
);
  /* Similarly, if this is converting a floating-point value into a
     single-word integer.  Only do this if the host and target parameters are
829 else if (((HOST_FLOAT_FORMAT
== TARGET_FLOAT_FORMAT
830 && HOST_BITS_PER_WIDE_INT
== BITS_PER_WORD
)
831 || flag_pretend_float
)
832 && (GET_MODE_CLASS (mode
) == MODE_INT
833 || GET_MODE_CLASS (mode
) == MODE_PARTIAL_INT
)
834 && GET_CODE (x
) == CONST_DOUBLE
835 && GET_MODE_CLASS (GET_MODE (x
)) == MODE_FLOAT
836 && GET_MODE_BITSIZE (mode
) == BITS_PER_WORD
)
837 return operand_subword (x
, word
, 0, GET_MODE (x
));
  /* Similarly, if this is converting a floating-point value into a
     two-word integer, we can do this one word at a time and make an
     integer.  Only do this if the host and target parameters are
844 else if (((HOST_FLOAT_FORMAT
== TARGET_FLOAT_FORMAT
845 && HOST_BITS_PER_WIDE_INT
== BITS_PER_WORD
)
846 || flag_pretend_float
)
847 && (GET_MODE_CLASS (mode
) == MODE_INT
848 || GET_MODE_CLASS (mode
) == MODE_PARTIAL_INT
)
849 && GET_CODE (x
) == CONST_DOUBLE
850 && GET_MODE_CLASS (GET_MODE (x
)) == MODE_FLOAT
851 && GET_MODE_BITSIZE (mode
) == 2 * BITS_PER_WORD
)
854 = operand_subword (x
, word
+ WORDS_BIG_ENDIAN
, 0, GET_MODE (x
));
856 = operand_subword (x
, word
+ ! WORDS_BIG_ENDIAN
, 0, GET_MODE (x
));
858 if (lowpart
&& GET_CODE (lowpart
) == CONST_INT
859 && highpart
&& GET_CODE (highpart
) == CONST_INT
)
860 return immed_double_const (INTVAL (lowpart
), INTVAL (highpart
), mode
);
863 /* Otherwise, we can't do this. */
/* Return the real part (which has mode MODE) of a complex value X.
   This always comes at the low address in memory.  */
871 gen_realpart (mode
, x
)
872 enum machine_mode mode
;
875 if (GET_CODE (x
) == CONCAT
&& GET_MODE (XEXP (x
, 0)) == mode
)
877 else if (WORDS_BIG_ENDIAN
878 && GET_MODE_BITSIZE (mode
) < BITS_PER_WORD
880 && REGNO (x
) < FIRST_PSEUDO_REGISTER
)
881 fatal ("Unable to access real part of complex value in a hard register on this target");
882 else if (WORDS_BIG_ENDIAN
)
883 return gen_highpart (mode
, x
);
885 return gen_lowpart (mode
, x
);
/* Return the imaginary part (which has mode MODE) of a complex value X.
   This always comes at the high address in memory.  */
892 gen_imagpart (mode
, x
)
893 enum machine_mode mode
;
896 if (GET_CODE (x
) == CONCAT
&& GET_MODE (XEXP (x
, 0)) == mode
)
898 else if (WORDS_BIG_ENDIAN
)
899 return gen_lowpart (mode
, x
);
900 else if (!WORDS_BIG_ENDIAN
901 && GET_MODE_BITSIZE (mode
) < BITS_PER_WORD
903 && REGNO (x
) < FIRST_PSEUDO_REGISTER
)
904 fatal ("Unable to access imaginary part of complex value in a hard register on this target");
906 return gen_highpart (mode
, x
);
/* Return 1 iff X, assumed to be a SUBREG,
   refers to the real part of the complex value in its containing reg.
   Complex values are always stored with the real part in the first word,
   regardless of WORDS_BIG_ENDIAN.  */
915 subreg_realpart_p (x
)
918 if (GET_CODE (x
) != SUBREG
)
921 return SUBREG_WORD (x
) * UNITS_PER_WORD
< GET_MODE_UNIT_SIZE (GET_MODE (SUBREG_REG (x
)));
/* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a value,
   return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
   least-significant part of X.
   MODE specifies how big a part of X to return;
   it usually should not be larger than a word.
   If X is a MEM whose address is a QUEUED, the value may be so also.  */
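/* Illustrative sketch (not part of the original sources): taking the low
   word of a double-word pseudo, e.g.

     rtx lo = gen_lowpart (SImode, di_reg);

   where di_reg is assumed to be a DImode REG; the result is a SUBREG of
   that pseudo (hard registers instead yield a REG with adjusted number).  */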
932 gen_lowpart (mode
, x
)
933 enum machine_mode mode
;
936 rtx result
= gen_lowpart_common (mode
, x
);
940 else if (GET_CODE (x
) == REG
)
942 /* Must be a hard reg that's not valid in MODE. */
943 result
= gen_lowpart_common (mode
, copy_to_reg (x
));
948 else if (GET_CODE (x
) == MEM
)
950 /* The only additional case we can do is MEM. */
951 register int offset
= 0;
952 if (WORDS_BIG_ENDIAN
)
953 offset
= (MAX (GET_MODE_SIZE (GET_MODE (x
)), UNITS_PER_WORD
)
954 - MAX (GET_MODE_SIZE (mode
), UNITS_PER_WORD
));
956 if (BYTES_BIG_ENDIAN
)
	/* Adjust the address so that the address-after-the-data
	   is unchanged.  */
959 offset
-= (MIN (UNITS_PER_WORD
, GET_MODE_SIZE (mode
))
960 - MIN (UNITS_PER_WORD
, GET_MODE_SIZE (GET_MODE (x
))));
962 return change_address (x
, mode
, plus_constant (XEXP (x
, 0), offset
));
964 else if (GET_CODE (x
) == ADDRESSOF
)
965 return gen_lowpart (mode
, force_reg (GET_MODE (x
), x
));
/* Like `gen_lowpart', but refer to the most significant part.
   This is used to access the imaginary part of a complex number.  */
974 gen_highpart (mode
, x
)
975 enum machine_mode mode
;
978 /* This case loses if X is a subreg. To catch bugs early,
979 complain if an invalid MODE is used even in other cases. */
980 if (GET_MODE_SIZE (mode
) > UNITS_PER_WORD
981 && GET_MODE_SIZE (mode
) != GET_MODE_UNIT_SIZE (GET_MODE (x
)))
983 if (GET_CODE (x
) == CONST_DOUBLE
984 #if !(TARGET_FLOAT_FORMAT != HOST_FLOAT_FORMAT || defined (REAL_IS_NOT_DOUBLE))
985 && GET_MODE_CLASS (GET_MODE (x
)) != MODE_FLOAT
988 return GEN_INT (CONST_DOUBLE_HIGH (x
) & GET_MODE_MASK (mode
));
989 else if (GET_CODE (x
) == CONST_INT
)
991 if (HOST_BITS_PER_WIDE_INT
<= BITS_PER_WORD
)
993 return GEN_INT (INTVAL (x
) >> (HOST_BITS_PER_WIDE_INT
- BITS_PER_WORD
));
995 else if (GET_CODE (x
) == MEM
)
997 register int offset
= 0;
998 if (! WORDS_BIG_ENDIAN
)
999 offset
= (MAX (GET_MODE_SIZE (GET_MODE (x
)), UNITS_PER_WORD
)
1000 - MAX (GET_MODE_SIZE (mode
), UNITS_PER_WORD
));
1002 if (! BYTES_BIG_ENDIAN
1003 && GET_MODE_SIZE (mode
) < UNITS_PER_WORD
)
1004 offset
-= (GET_MODE_SIZE (mode
)
1005 - MIN (UNITS_PER_WORD
,
1006 GET_MODE_SIZE (GET_MODE (x
))));
1008 return change_address (x
, mode
, plus_constant (XEXP (x
, 0), offset
));
1010 else if (GET_CODE (x
) == SUBREG
)
1012 /* The only time this should occur is when we are looking at a
1013 multi-word item with a SUBREG whose mode is the same as that of the
1014 item. It isn't clear what we would do if it wasn't. */
1015 if (SUBREG_WORD (x
) != 0)
1017 return gen_highpart (mode
, SUBREG_REG (x
));
1019 else if (GET_CODE (x
) == REG
)
1023 /* Let the backend decide how many registers to skip. This is needed
1024 in particular for sparc64 where fp regs are smaller than a word. */
1025 /* ??? Note that subregs are now ambiguous, in that those against
1026 pseudos are sized by the word size, while those against hard
1027 regs are sized by the underlying register size. Better would be
1028 to always interpret the subreg offset parameter as bytes or bits. */
1030 if (WORDS_BIG_ENDIAN
)
1032 else if (REGNO (x
) < FIRST_PSEUDO_REGISTER
)
1033 word
= (HARD_REGNO_NREGS (REGNO (x
), GET_MODE (x
))
1034 - HARD_REGNO_NREGS (REGNO (x
), mode
));
1036 word
= ((GET_MODE_SIZE (GET_MODE (x
))
1037 - MAX (GET_MODE_SIZE (mode
), UNITS_PER_WORD
))
1040 if (REGNO (x
) < FIRST_PSEUDO_REGISTER
1041 /* integrate.c can't handle parts of a return value register. */
1042 && (! REG_FUNCTION_VALUE_P (x
)
1043 || ! rtx_equal_function_value_matters
)
1044 /* We want to keep the stack, frame, and arg pointers special. */
1045 && x
!= frame_pointer_rtx
1046 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1047 && x
!= arg_pointer_rtx
1049 && x
!= stack_pointer_rtx
)
1050 return gen_rtx_REG (mode
, REGNO (x
) + word
);
1052 return gen_rtx_SUBREG (mode
, x
, word
);
/* Return 1 iff X, assumed to be a SUBREG,
   refers to the least significant part of its containing reg.
   If X is not a SUBREG, always return 1 (it is its own low part!).  */
1063 subreg_lowpart_p (x
)
1066 if (GET_CODE (x
) != SUBREG
)
1068 else if (GET_MODE (SUBREG_REG (x
)) == VOIDmode
)
1071 if (WORDS_BIG_ENDIAN
1072 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
))) > UNITS_PER_WORD
)
1073 return (SUBREG_WORD (x
)
1074 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
)))
1075 - MAX (GET_MODE_SIZE (GET_MODE (x
)), UNITS_PER_WORD
))
1078 return SUBREG_WORD (x
) == 0;
/* Return subword I of operand OP.
   The word number, I, is interpreted as the word number starting at the
   low-order address.  Word 0 is the low-order word if not WORDS_BIG_ENDIAN,
   otherwise it is the high-order word.

   If we cannot extract the required word, we return zero.  Otherwise, an
   rtx corresponding to the requested word will be returned.

   VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
   reload has completed, a valid address will always be returned.  After
   reload, if a valid address cannot be returned, we return zero.

   If VALIDATE_ADDRESS is zero, we simply form the required address; validating
   it is the responsibility of the caller.

   MODE is the mode of OP in case it is a CONST_INT.  */
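/* Illustrative sketch (not part of the original sources): fetching the
   high word of a DImode value, e.g.

     rtx hi = operand_subword (op, WORDS_BIG_ENDIAN ? 0 : 1, 1, DImode);

   which yields a REG, SUBREG, MEM or CONST_INT for that word, or 0 when
   the word cannot be extracted (callers then use operand_subword_force).  */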
1099 operand_subword (op
, i
, validate_address
, mode
)
1102 int validate_address
;
1103 enum machine_mode mode
;
1106 int size_ratio
= HOST_BITS_PER_WIDE_INT
/ BITS_PER_WORD
;
1108 if (mode
== VOIDmode
)
1109 mode
= GET_MODE (op
);
1111 if (mode
== VOIDmode
)
1114 /* If OP is narrower than a word, fail. */
1116 && (GET_MODE_SIZE (mode
) < UNITS_PER_WORD
))
1119 /* If we want a word outside OP, return zero. */
1121 && (i
+ 1) * UNITS_PER_WORD
> GET_MODE_SIZE (mode
))
1124 /* If OP is already an integer word, return it. */
1125 if (GET_MODE_CLASS (mode
) == MODE_INT
1126 && GET_MODE_SIZE (mode
) == UNITS_PER_WORD
)
1129 /* If OP is a REG or SUBREG, we can handle it very simply. */
1130 if (GET_CODE (op
) == REG
)
      /* ??? There is a potential problem with this code.  It does not
	 properly handle extractions of a subword from a hard register
	 that is larger than word_mode.  Presumably the check for
	 HARD_REGNO_MODE_OK catches most of these cases.  */
1137 /* If OP is a hard register, but OP + I is not a hard register,
1138 then extracting a subword is impossible.
1140 For example, consider if OP is the last hard register and it is
1141 larger than word_mode. If we wanted word N (for N > 0) because a
1142 part of that hard register was known to contain a useful value,
1143 then OP + I would refer to a pseudo, not the hard register we
1145 if (REGNO (op
) < FIRST_PSEUDO_REGISTER
1146 && REGNO (op
) + i
>= FIRST_PSEUDO_REGISTER
)
1149 /* If the register is not valid for MODE, return 0. Note we
1150 have to check both OP and OP + I since they may refer to
1151 different parts of the register file.
1153 Consider if OP refers to the last 96bit FP register and we want
1154 subword 3 because that subword is known to contain a value we
1156 if (REGNO (op
) < FIRST_PSEUDO_REGISTER
1157 && (! HARD_REGNO_MODE_OK (REGNO (op
), word_mode
)
1158 || ! HARD_REGNO_MODE_OK (REGNO (op
) + i
, word_mode
)))
1160 else if (REGNO (op
) >= FIRST_PSEUDO_REGISTER
1161 || (REG_FUNCTION_VALUE_P (op
)
1162 && rtx_equal_function_value_matters
)
	   /* We want to keep the stack, frame, and arg pointers
	      special.  */
1165 || op
== frame_pointer_rtx
1166 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1167 || op
== arg_pointer_rtx
1169 || op
== stack_pointer_rtx
)
1170 return gen_rtx_SUBREG (word_mode
, op
, i
);
1172 return gen_rtx_REG (word_mode
, REGNO (op
) + i
);
1174 else if (GET_CODE (op
) == SUBREG
)
1175 return gen_rtx_SUBREG (word_mode
, SUBREG_REG (op
), i
+ SUBREG_WORD (op
));
1176 else if (GET_CODE (op
) == CONCAT
)
1178 int partwords
= GET_MODE_UNIT_SIZE (GET_MODE (op
)) / UNITS_PER_WORD
;
1180 return operand_subword (XEXP (op
, 0), i
, validate_address
, mode
);
1181 return operand_subword (XEXP (op
, 1), i
- partwords
,
1182 validate_address
, mode
);
1185 /* Form a new MEM at the requested address. */
1186 if (GET_CODE (op
) == MEM
)
1188 rtx addr
= plus_constant (XEXP (op
, 0), i
* UNITS_PER_WORD
);
1191 if (validate_address
)
1193 if (reload_completed
)
1195 if (! strict_memory_address_p (word_mode
, addr
))
1199 addr
= memory_address (word_mode
, addr
);
1202 new = gen_rtx_MEM (word_mode
, addr
);
1204 MEM_COPY_ATTRIBUTES (new, op
);
1205 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (op
);
1206 MEM_ALIAS_SET (new) = MEM_ALIAS_SET (op
);
  /* The only remaining cases are when OP is a constant.  If the host and
     target floating formats are the same, handling two-word floating
     constants is easy.  Note that REAL_VALUE_TO_TARGET_{SINGLE,DOUBLE}
     are defined as returning one or two 32 bit values, respectively,
     and not values of BITS_PER_WORD bits.  */
1216 #ifdef REAL_ARITHMETIC
1217 /* The output is some bits, the width of the target machine's word.
1218 A wider-word host can surely hold them in a CONST_INT. A narrower-word
1220 if (HOST_BITS_PER_WIDE_INT
>= BITS_PER_WORD
1221 && GET_MODE_CLASS (mode
) == MODE_FLOAT
1222 && GET_MODE_BITSIZE (mode
) == 64
1223 && GET_CODE (op
) == CONST_DOUBLE
)
1228 REAL_VALUE_FROM_CONST_DOUBLE (rv
, op
);
1229 REAL_VALUE_TO_TARGET_DOUBLE (rv
, k
);
      /* We handle 32-bit and >= 64-bit words here.  Note that the order in
	 which the words are written depends on the word endianness.
	 ??? This is a potential portability problem and should
	 be fixed at some point.

	 We must exercise caution with the sign bit.  By definition there
	 are 32 significant bits in K; there may be more in a HOST_WIDE_INT.
	 Consider a host with a 32-bit long and a 64-bit HOST_WIDE_INT.
	 So we explicitly mask and sign-extend as necessary.  */
1240 if (BITS_PER_WORD
== 32)
1243 val
= ((val
& 0xffffffff) ^ 0x80000000) - 0x80000000;
1244 return GEN_INT (val
);
1246 #if HOST_BITS_PER_WIDE_INT >= 64
1247 else if (BITS_PER_WORD
>= 64 && i
== 0)
1249 val
= k
[! WORDS_BIG_ENDIAN
];
1250 val
= (((val
& 0xffffffff) ^ 0x80000000) - 0x80000000) << 32;
1251 val
|= (HOST_WIDE_INT
) k
[WORDS_BIG_ENDIAN
] & 0xffffffff;
1252 return GEN_INT (val
);
1255 else if (BITS_PER_WORD
== 16)
1258 if ((i
& 1) == !WORDS_BIG_ENDIAN
)
1261 return GEN_INT (val
);
1266 else if (HOST_BITS_PER_WIDE_INT
>= BITS_PER_WORD
1267 && GET_MODE_CLASS (mode
) == MODE_FLOAT
1268 && GET_MODE_BITSIZE (mode
) > 64
1269 && GET_CODE (op
) == CONST_DOUBLE
)
1274 REAL_VALUE_FROM_CONST_DOUBLE (rv
, op
);
1275 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv
, k
);
1277 if (BITS_PER_WORD
== 32)
1280 val
= ((val
& 0xffffffff) ^ 0x80000000) - 0x80000000;
1281 return GEN_INT (val
);
1286 #else /* no REAL_ARITHMETIC */
1287 if (((HOST_FLOAT_FORMAT
== TARGET_FLOAT_FORMAT
1288 && HOST_BITS_PER_WIDE_INT
== BITS_PER_WORD
)
1289 || flag_pretend_float
)
1290 && GET_MODE_CLASS (mode
) == MODE_FLOAT
1291 && GET_MODE_SIZE (mode
) == 2 * UNITS_PER_WORD
1292 && GET_CODE (op
) == CONST_DOUBLE
)
1294 /* The constant is stored in the host's word-ordering,
1295 but we want to access it in the target's word-ordering. Some
1296 compilers don't like a conditional inside macro args, so we have two
1297 copies of the return. */
1298 #ifdef HOST_WORDS_BIG_ENDIAN
1299 return GEN_INT (i
== WORDS_BIG_ENDIAN
1300 ? CONST_DOUBLE_HIGH (op
) : CONST_DOUBLE_LOW (op
));
1302 return GEN_INT (i
!= WORDS_BIG_ENDIAN
1303 ? CONST_DOUBLE_HIGH (op
) : CONST_DOUBLE_LOW (op
));
1306 #endif /* no REAL_ARITHMETIC */
1308 /* Single word float is a little harder, since single- and double-word
1309 values often do not have the same high-order bits. We have already
1310 verified that we want the only defined word of the single-word value. */
1311 #ifdef REAL_ARITHMETIC
1312 if (GET_MODE_CLASS (mode
) == MODE_FLOAT
1313 && GET_MODE_BITSIZE (mode
) == 32
1314 && GET_CODE (op
) == CONST_DOUBLE
)
1319 REAL_VALUE_FROM_CONST_DOUBLE (rv
, op
);
1320 REAL_VALUE_TO_TARGET_SINGLE (rv
, l
);
1322 /* Sign extend from known 32-bit value to HOST_WIDE_INT. */
1324 val
= ((val
& 0xffffffff) ^ 0x80000000) - 0x80000000;
1326 if (BITS_PER_WORD
== 16)
1328 if ((i
& 1) == !WORDS_BIG_ENDIAN
)
1333 return GEN_INT (val
);
1336 if (((HOST_FLOAT_FORMAT
== TARGET_FLOAT_FORMAT
1337 && HOST_BITS_PER_WIDE_INT
== BITS_PER_WORD
)
1338 || flag_pretend_float
)
1339 && sizeof (float) * 8 == HOST_BITS_PER_WIDE_INT
1340 && GET_MODE_CLASS (mode
) == MODE_FLOAT
1341 && GET_MODE_SIZE (mode
) == UNITS_PER_WORD
1342 && GET_CODE (op
) == CONST_DOUBLE
)
1345 union {float f
; HOST_WIDE_INT i
; } u
;
1347 REAL_VALUE_FROM_CONST_DOUBLE (d
, op
);
1350 return GEN_INT (u
.i
);
1352 if (((HOST_FLOAT_FORMAT
== TARGET_FLOAT_FORMAT
1353 && HOST_BITS_PER_WIDE_INT
== BITS_PER_WORD
)
1354 || flag_pretend_float
)
1355 && sizeof (double) * 8 == HOST_BITS_PER_WIDE_INT
1356 && GET_MODE_CLASS (mode
) == MODE_FLOAT
1357 && GET_MODE_SIZE (mode
) == UNITS_PER_WORD
1358 && GET_CODE (op
) == CONST_DOUBLE
)
1361 union {double d
; HOST_WIDE_INT i
; } u
;
1363 REAL_VALUE_FROM_CONST_DOUBLE (d
, op
);
1366 return GEN_INT (u
.i
);
1368 #endif /* no REAL_ARITHMETIC */
1370 /* The only remaining cases that we can handle are integers.
1371 Convert to proper endianness now since these cases need it.
1372 At this point, i == 0 means the low-order word.
1374 We do not want to handle the case when BITS_PER_WORD <= HOST_BITS_PER_INT
1375 in general. However, if OP is (const_int 0), we can just return
1378 if (op
== const0_rtx
)
1381 if (GET_MODE_CLASS (mode
) != MODE_INT
1382 || (GET_CODE (op
) != CONST_INT
&& GET_CODE (op
) != CONST_DOUBLE
)
1383 || BITS_PER_WORD
> HOST_BITS_PER_WIDE_INT
)
1386 if (WORDS_BIG_ENDIAN
)
1387 i
= GET_MODE_SIZE (mode
) / UNITS_PER_WORD
- 1 - i
;
1389 /* Find out which word on the host machine this value is in and get
1390 it from the constant. */
1391 val
= (i
/ size_ratio
== 0
1392 ? (GET_CODE (op
) == CONST_INT
? INTVAL (op
) : CONST_DOUBLE_LOW (op
))
1393 : (GET_CODE (op
) == CONST_INT
1394 ? (INTVAL (op
) < 0 ? ~0 : 0) : CONST_DOUBLE_HIGH (op
)));
1396 /* Get the value we want into the low bits of val. */
1397 if (BITS_PER_WORD
< HOST_BITS_PER_WIDE_INT
)
1398 val
= ((val
>> ((i
% size_ratio
) * BITS_PER_WORD
)));
1400 val
= trunc_int_for_mode (val
, word_mode
);
1402 return GEN_INT (val
);
/* Similar to `operand_subword', but never return 0.  If we can't extract
   the required subword, put OP into a register and try again.  If that fails,
   abort.  We always validate the address in this case.  It is not valid
   to call this function after reload; it is mostly meant for RTL
   generation.

   MODE is the mode of OP, in case it is CONST_INT.  */
1414 operand_subword_force (op
, i
, mode
)
1417 enum machine_mode mode
;
1419 rtx result
= operand_subword (op
, i
, 1, mode
);
1424 if (mode
!= BLKmode
&& mode
!= VOIDmode
)
1426 /* If this is a register which can not be accessed by words, copy it
1427 to a pseudo register. */
1428 if (GET_CODE (op
) == REG
)
1429 op
= copy_to_reg (op
);
1431 op
= force_reg (mode
, op
);
1434 result
= operand_subword (op
, i
, 1, mode
);
/* Given a compare instruction, swap the operands.
   A test instruction is changed into a compare of 0 against the operand.  */
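/* Illustrative sketch (not part of the original sources): applied to an
   insn whose pattern is (set (cc0) (compare (reg a) (reg b))), the pattern
   becomes (set (cc0) (compare (reg b) (reg a))); a plain test such as
   (set (cc0) (reg a)) is rewritten as
   (set (cc0) (compare (const_int 0) (reg a))).  */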
1445 reverse_comparison (insn
)
1448 rtx body
= PATTERN (insn
);
1451 if (GET_CODE (body
) == SET
)
1452 comp
= SET_SRC (body
);
1454 comp
= SET_SRC (XVECEXP (body
, 0, 0));
1456 if (GET_CODE (comp
) == COMPARE
)
1458 rtx op0
= XEXP (comp
, 0);
1459 rtx op1
= XEXP (comp
, 1);
1460 XEXP (comp
, 0) = op1
;
1461 XEXP (comp
, 1) = op0
;
1465 rtx
new = gen_rtx_COMPARE (VOIDmode
,
1466 CONST0_RTX (GET_MODE (comp
)), comp
);
1467 if (GET_CODE (body
) == SET
)
1468 SET_SRC (body
) = new;
1470 SET_SRC (XVECEXP (body
, 0, 0)) = new;
/* Return a memory reference like MEMREF, but with its mode changed
   to MODE and its address changed to ADDR.
   (VOIDmode means don't change the mode.
   NULL for ADDR means don't change the address.)  */
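/* Illustrative sketch (not part of the original sources): narrowing an
   SImode memory reference to its first byte, e.g.

     rtx byte_ref = change_address (mem, QImode, NULL_RTX);

   keeps the address (and the RTX_UNCHANGING_P and alias-set attributes)
   but gives the new MEM mode QImode.  */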
1480 change_address (memref
, mode
, addr
)
1482 enum machine_mode mode
;
1487 if (GET_CODE (memref
) != MEM
)
1489 if (mode
== VOIDmode
)
1490 mode
= GET_MODE (memref
);
1492 addr
= XEXP (memref
, 0);
1494 /* If reload is in progress or has completed, ADDR must be valid.
1495 Otherwise, we can call memory_address to make it valid. */
1496 if (reload_completed
|| reload_in_progress
)
1498 if (! memory_address_p (mode
, addr
))
1502 addr
= memory_address (mode
, addr
);
1504 if (rtx_equal_p (addr
, XEXP (memref
, 0)) && mode
== GET_MODE (memref
))
1507 new = gen_rtx_MEM (mode
, addr
);
1508 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (memref
);
1509 MEM_COPY_ATTRIBUTES (new, memref
);
1510 MEM_ALIAS_SET (new) = MEM_ALIAS_SET (memref
);
1514 /* Return a newly created CODE_LABEL rtx with a unique label number. */
1521 label
= gen_rtx_CODE_LABEL (VOIDmode
, 0, NULL_RTX
,
1522 NULL_RTX
, label_num
++, NULL_PTR
);
1524 LABEL_NUSES (label
) = 0;
1528 /* For procedure integration. */
1530 /* Install new pointers to the first and last insns in the chain.
1531 Also, set cur_insn_uid to one higher than the last in use.
1532 Used for an inline-procedure after copying the insn chain. */
1535 set_new_first_and_last_insn (first
, last
)
1544 for (insn
= first
; insn
; insn
= NEXT_INSN (insn
))
1545 cur_insn_uid
= MAX (cur_insn_uid
, INSN_UID (insn
));
1550 /* Set the range of label numbers found in the current function.
1551 This is used when belatedly compiling an inline function. */
1554 set_new_first_and_last_label_num (first
, last
)
1557 base_label_num
= label_num
;
1558 first_label_num
= first
;
1559 last_label_num
= last
;
1562 /* Set the last label number found in the current function.
1563 This is used when belatedly compiling an inline function. */
1566 set_new_last_label_num (last
)
1569 base_label_num
= label_num
;
1570 last_label_num
= last
;
1573 /* Restore all variables describing the current status from the structure *P.
1574 This is used after a nested function. */
1577 restore_emit_status (p
)
1581 clear_emit_caches ();
1584 /* Clear out all parts of the state in F that can safely be discarded
1585 after the function has been compiled, to let garbage collection
1586 reclaim the memory. */
1589 free_emit_status (f
)
1592 free (f
->emit
->x_regno_reg_rtx
);
1593 free (f
->emit
->regno_pointer_flag
);
1594 free (f
->emit
->regno_pointer_align
);
1599 /* Go through all the RTL insn bodies and copy any invalid shared structure.
1600 It does not work to do this twice, because the mark bits set here
1601 are not cleared afterwards. */
1604 unshare_all_rtl (insn
)
1607 for (; insn
; insn
= NEXT_INSN (insn
))
1608 if (GET_CODE (insn
) == INSN
|| GET_CODE (insn
) == JUMP_INSN
1609 || GET_CODE (insn
) == CALL_INSN
)
1611 PATTERN (insn
) = copy_rtx_if_shared (PATTERN (insn
));
1612 REG_NOTES (insn
) = copy_rtx_if_shared (REG_NOTES (insn
));
1613 LOG_LINKS (insn
) = copy_rtx_if_shared (LOG_LINKS (insn
));
1616 /* Make sure the addresses of stack slots found outside the insn chain
1617 (such as, in DECL_RTL of a variable) are not shared
1618 with the insn chain.
1620 This special care is necessary when the stack slot MEM does not
1621 actually appear in the insn chain. If it does appear, its address
1622 is unshared from all else at that point. */
1624 copy_rtx_if_shared (stack_slot_list
);
1627 /* Mark ORIG as in use, and return a copy of it if it was already in use.
1628 Recursively does the same for subexpressions. */
1631 copy_rtx_if_shared (orig
)
1634 register rtx x
= orig
;
1636 register enum rtx_code code
;
1637 register const char *format_ptr
;
1643 code
= GET_CODE (x
);
1645 /* These types may be freely shared. */
1658 /* SCRATCH must be shared because they represent distinct values. */
1662 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
1663 a LABEL_REF, it isn't sharable. */
1664 if (GET_CODE (XEXP (x
, 0)) == PLUS
1665 && GET_CODE (XEXP (XEXP (x
, 0), 0)) == SYMBOL_REF
1666 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == CONST_INT
)
1675 /* The chain of insns is not being copied. */
1679 /* A MEM is allowed to be shared if its address is constant.
1681 We used to allow sharing of MEMs which referenced
1682 virtual_stack_vars_rtx or virtual_incoming_args_rtx, but
1683 that can lose. instantiate_virtual_regs will not unshare
1684 the MEMs, and combine may change the structure of the address
1685 because it looks safe and profitable in one context, but
1686 in some other context it creates unrecognizable RTL. */
1687 if (CONSTANT_ADDRESS_P (XEXP (x
, 0)))
1696 /* This rtx may not be shared. If it has already been seen,
1697 replace it with a copy of itself. */
1703 copy
= rtx_alloc (code
);
1704 bcopy ((char *) x
, (char *) copy
,
1705 (sizeof (*copy
) - sizeof (copy
->fld
)
1706 + sizeof (copy
->fld
[0]) * GET_RTX_LENGTH (code
)));
1712 /* Now scan the subexpressions recursively.
1713 We can store any replaced subexpressions directly into X
1714 since we know X is not shared! Any vectors in X
1715 must be copied if X was copied. */
1717 format_ptr
= GET_RTX_FORMAT (code
);
1719 for (i
= 0; i
< GET_RTX_LENGTH (code
); i
++)
1721 switch (*format_ptr
++)
1724 XEXP (x
, i
) = copy_rtx_if_shared (XEXP (x
, i
));
1728 if (XVEC (x
, i
) != NULL
)
1731 int len
= XVECLEN (x
, i
);
1733 if (copied
&& len
> 0)
1734 XVEC (x
, i
) = gen_rtvec_v (len
, XVEC (x
, i
)->elem
);
1735 for (j
= 0; j
< len
; j
++)
1736 XVECEXP (x
, i
, j
) = copy_rtx_if_shared (XVECEXP (x
, i
, j
));
1744 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
1745 to look for shared sub-parts. */
1748 reset_used_flags (x
)
1752 register enum rtx_code code
;
1753 register const char *format_ptr
;
1758 code
= GET_CODE (x
);
1760 /* These types may be freely shared so we needn't do any resetting
1781 /* The chain of insns is not being copied. */
1790 format_ptr
= GET_RTX_FORMAT (code
);
1791 for (i
= 0; i
< GET_RTX_LENGTH (code
); i
++)
1793 switch (*format_ptr
++)
1796 reset_used_flags (XEXP (x
, i
));
1800 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
1801 reset_used_flags (XVECEXP (x
, i
, j
));
1807 /* Copy X if necessary so that it won't be altered by changes in OTHER.
1808 Return X or the rtx for the pseudo reg the value of X was copied into.
1809 OTHER must be valid as a SET_DEST. */
1812 make_safe_from (x
, other
)
1816 switch (GET_CODE (other
))
1819 other
= SUBREG_REG (other
);
1821 case STRICT_LOW_PART
:
1824 other
= XEXP (other
, 0);
1830 if ((GET_CODE (other
) == MEM
1832 && GET_CODE (x
) != REG
1833 && GET_CODE (x
) != SUBREG
)
1834 || (GET_CODE (other
) == REG
1835 && (REGNO (other
) < FIRST_PSEUDO_REGISTER
1836 || reg_mentioned_p (other
, x
))))
1838 rtx temp
= gen_reg_rtx (GET_MODE (x
));
1839 emit_move_insn (temp
, x
);
1845 /* Emission of insns (adding them to the doubly-linked list). */
1847 /* Return the first insn of the current sequence or current function. */
1855 /* Return the last insn emitted in current sequence or current function. */
1863 /* Specify a new insn as the last in the chain. */
1866 set_last_insn (insn
)
1869 if (NEXT_INSN (insn
) != 0)
1874 /* Return the last insn emitted, even if it is in a sequence now pushed. */
1877 get_last_insn_anywhere ()
1879 struct sequence_stack
*stack
;
1882 for (stack
= seq_stack
; stack
; stack
= stack
->next
)
1883 if (stack
->last
!= 0)
1888 /* Return a number larger than any instruction's uid in this function. */
1893 return cur_insn_uid
;
1896 /* Return the next insn. If it is a SEQUENCE, return the first insn
1905 insn
= NEXT_INSN (insn
);
1906 if (insn
&& GET_CODE (insn
) == INSN
1907 && GET_CODE (PATTERN (insn
)) == SEQUENCE
)
1908 insn
= XVECEXP (PATTERN (insn
), 0, 0);
1914 /* Return the previous insn. If it is a SEQUENCE, return the last insn
1918 previous_insn (insn
)
1923 insn
= PREV_INSN (insn
);
1924 if (insn
&& GET_CODE (insn
) == INSN
1925 && GET_CODE (PATTERN (insn
)) == SEQUENCE
)
1926 insn
= XVECEXP (PATTERN (insn
), 0, XVECLEN (PATTERN (insn
), 0) - 1);
1932 /* Return the next insn after INSN that is not a NOTE. This routine does not
1933 look inside SEQUENCEs. */
1936 next_nonnote_insn (insn
)
1941 insn
= NEXT_INSN (insn
);
1942 if (insn
== 0 || GET_CODE (insn
) != NOTE
)
1949 /* Return the previous insn before INSN that is not a NOTE. This routine does
1950 not look inside SEQUENCEs. */
1953 prev_nonnote_insn (insn
)
1958 insn
= PREV_INSN (insn
);
1959 if (insn
== 0 || GET_CODE (insn
) != NOTE
)
1966 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
1967 or 0, if there is none. This routine does not look inside
1971 next_real_insn (insn
)
1976 insn
= NEXT_INSN (insn
);
1977 if (insn
== 0 || GET_CODE (insn
) == INSN
1978 || GET_CODE (insn
) == CALL_INSN
|| GET_CODE (insn
) == JUMP_INSN
)
1985 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
1986 or 0, if there is none. This routine does not look inside
1990 prev_real_insn (insn
)
1995 insn
= PREV_INSN (insn
);
1996 if (insn
== 0 || GET_CODE (insn
) == INSN
|| GET_CODE (insn
) == CALL_INSN
1997 || GET_CODE (insn
) == JUMP_INSN
)
2004 /* Find the next insn after INSN that really does something. This routine
2005 does not look inside SEQUENCEs. Until reload has completed, this is the
2006 same as next_real_insn. */
2009 next_active_insn (insn
)
2014 insn
= NEXT_INSN (insn
);
2016 || GET_CODE (insn
) == CALL_INSN
|| GET_CODE (insn
) == JUMP_INSN
2017 || (GET_CODE (insn
) == INSN
2018 && (! reload_completed
2019 || (GET_CODE (PATTERN (insn
)) != USE
2020 && GET_CODE (PATTERN (insn
)) != CLOBBER
))))
2027 /* Find the last insn before INSN that really does something. This routine
2028 does not look inside SEQUENCEs. Until reload has completed, this is the
2029 same as prev_real_insn. */
2032 prev_active_insn (insn
)
2037 insn
= PREV_INSN (insn
);
2039 || GET_CODE (insn
) == CALL_INSN
|| GET_CODE (insn
) == JUMP_INSN
2040 || (GET_CODE (insn
) == INSN
2041 && (! reload_completed
2042 || (GET_CODE (PATTERN (insn
)) != USE
2043 && GET_CODE (PATTERN (insn
)) != CLOBBER
))))
2050 /* Return the next CODE_LABEL after the insn INSN, or 0 if there is none. */
2058 insn
= NEXT_INSN (insn
);
2059 if (insn
== 0 || GET_CODE (insn
) == CODE_LABEL
)
2066 /* Return the last CODE_LABEL before the insn INSN, or 0 if there is none. */
2074 insn
= PREV_INSN (insn
);
2075 if (insn
== 0 || GET_CODE (insn
) == CODE_LABEL
)
2083 /* INSN uses CC0 and is being moved into a delay slot. Set up REG_CC_SETTER
2084 and REG_CC_USER notes so we can find it. */
2087 link_cc0_insns (insn
)
2090 rtx user
= next_nonnote_insn (insn
);
2092 if (GET_CODE (user
) == INSN
&& GET_CODE (PATTERN (user
)) == SEQUENCE
)
2093 user
= XVECEXP (PATTERN (user
), 0, 0);
2095 REG_NOTES (user
) = gen_rtx_INSN_LIST (REG_CC_SETTER
, insn
,
2097 REG_NOTES (insn
) = gen_rtx_INSN_LIST (REG_CC_USER
, user
, REG_NOTES (insn
));
2100 /* Return the next insn that uses CC0 after INSN, which is assumed to
2101 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
2102 applied to the result of this function should yield INSN).
2104 Normally, this is simply the next insn. However, if a REG_CC_USER note
2105 is present, it contains the insn that uses CC0.
2107 Return 0 if we can't find the insn. */
2110 next_cc0_user (insn
)
2113 rtx note
= find_reg_note (insn
, REG_CC_USER
, NULL_RTX
);
2116 return XEXP (note
, 0);
2118 insn
= next_nonnote_insn (insn
);
2119 if (insn
&& GET_CODE (insn
) == INSN
&& GET_CODE (PATTERN (insn
)) == SEQUENCE
)
2120 insn
= XVECEXP (PATTERN (insn
), 0, 0);
2122 if (insn
&& GET_RTX_CLASS (GET_CODE (insn
)) == 'i'
2123 && reg_mentioned_p (cc0_rtx
, PATTERN (insn
)))
2129 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
2130 note, it is the previous insn. */
2133 prev_cc0_setter (insn
)
2136 rtx note
= find_reg_note (insn
, REG_CC_SETTER
, NULL_RTX
);
2139 return XEXP (note
, 0);
2141 insn
= prev_nonnote_insn (insn
);
2142 if (! sets_cc0_p (PATTERN (insn
)))
/* Try splitting insns that can be split for better scheduling.
   PAT is the pattern which might split.
   TRIAL is the insn providing PAT.
   LAST is non-zero if we should return the last insn of the sequence produced.

   If this routine succeeds in splitting, it returns the first or last
   replacement insn depending on the value of LAST.  Otherwise, it
   returns TRIAL.  If the insn to be returned can be split, it will be.  */
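/* Illustrative sketch (not part of the original sources): a pass that
   wants an insn broken into its final pieces would write

     rtx last = try_split (PATTERN (insn), insn, 1);

   and receive either the last insn of the replacement sequence or the
   original insn when no define_split in the machine description applies.  */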
2159 try_split (pat
, trial
, last
)
2163 rtx before
= PREV_INSN (trial
);
2164 rtx after
= NEXT_INSN (trial
);
2165 rtx seq
= split_insns (pat
, trial
);
2166 int has_barrier
= 0;
2169 /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
2170 We may need to handle this specially. */
2171 if (after
&& GET_CODE (after
) == BARRIER
)
2174 after
= NEXT_INSN (after
);
2179 /* SEQ can either be a SEQUENCE or the pattern of a single insn.
2180 The latter case will normally arise only when being done so that
2181 it, in turn, will be split (SFmode on the 29k is an example). */
2182 if (GET_CODE (seq
) == SEQUENCE
)
2184 /* If we are splitting a JUMP_INSN, look for the JUMP_INSN in
2185 SEQ and copy our JUMP_LABEL to it. If JUMP_LABEL is non-zero,
2186 increment the usage count so we don't delete the label. */
2189 if (GET_CODE (trial
) == JUMP_INSN
)
2190 for (i
= XVECLEN (seq
, 0) - 1; i
>= 0; i
--)
2191 if (GET_CODE (XVECEXP (seq
, 0, i
)) == JUMP_INSN
)
2193 JUMP_LABEL (XVECEXP (seq
, 0, i
)) = JUMP_LABEL (trial
);
2195 if (JUMP_LABEL (trial
))
2196 LABEL_NUSES (JUMP_LABEL (trial
))++;
2199 tem
= emit_insn_after (seq
, before
);
2201 delete_insn (trial
);
2203 emit_barrier_after (tem
);
      /* Recursively call try_split for each new insn created; by the
	 time control returns here that insn will be fully split, so
	 set LAST and continue from the insn after the one returned.
	 We can't use next_active_insn here since AFTER may be a note.
	 Ignore deleted insns, which can occur if not optimizing.  */
2210 for (tem
= NEXT_INSN (before
); tem
!= after
;
2211 tem
= NEXT_INSN (tem
))
2212 if (! INSN_DELETED_P (tem
)
2213 && GET_RTX_CLASS (GET_CODE (tem
)) == 'i')
2214 tem
= try_split (PATTERN (tem
), tem
, 1);
2216 /* Avoid infinite loop if the result matches the original pattern. */
2217 else if (rtx_equal_p (seq
, pat
))
2221 PATTERN (trial
) = seq
;
2222 INSN_CODE (trial
) = -1;
2223 try_split (seq
, trial
, last
);
2226 /* Return either the first or the last insn, depending on which was
2228 return last
? prev_active_insn (after
) : next_active_insn (before
);
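
/* Illustrative sketch only -- not part of the original source.  A pass that
   wants every insn split as far as possible can walk the chain and hand each
   insn's pattern to try_split, much as the recursive calls above do.  The
   function name is hypothetical.  */
#if 0
static void
split_all_insns_sketch ()
{
  rtx insn, next;

  for (insn = get_insns (); insn; insn = next)
    {
      next = NEXT_INSN (insn);
      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	try_split (PATTERN (insn), insn, 1);
    }
}
#endif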
/* Make and return an INSN rtx, initializing all its slots.
   Store PATTERN in the pattern slots.  */

rtx
make_insn_raw (pattern)
     rtx pattern;
{
  register rtx insn;

  /* If in RTL generation phase, see if FREE_INSN can be used.  */
  if (!ggc_p && free_insn != 0 && rtx_equal_function_value_matters)
    {
      insn = free_insn;
      free_insn = NEXT_INSN (free_insn);
      PUT_CODE (insn, INSN);
    }
  else
    insn = rtx_alloc (INSN);

  INSN_UID (insn) = cur_insn_uid++;
  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  LOG_LINKS (insn) = NULL;
  REG_NOTES (insn) = NULL;

  return insn;
}
/* Like `make_insn' but make a JUMP_INSN instead of an insn.  */

rtx
make_jump_insn_raw (pattern)
     rtx pattern;
{
  register rtx insn;

  insn = rtx_alloc (JUMP_INSN);
  INSN_UID (insn) = cur_insn_uid++;

  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  LOG_LINKS (insn) = NULL;
  REG_NOTES (insn) = NULL;
  JUMP_LABEL (insn) = NULL;

  return insn;
}

/* Like `make_insn' but make a CALL_INSN instead of an insn.  */

rtx
make_call_insn_raw (pattern)
     rtx pattern;
{
  register rtx insn;

  insn = rtx_alloc (CALL_INSN);
  INSN_UID (insn) = cur_insn_uid++;

  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  LOG_LINKS (insn) = NULL;
  REG_NOTES (insn) = NULL;
  CALL_INSN_FUNCTION_USAGE (insn) = NULL;

  return insn;
}
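
/* Illustrative sketch only -- not part of the original source.  The
   make_*_raw functions above allocate and initialize an insn but do not link
   it into any chain; pairing one with add_insn is roughly what emit_insn does
   for a non-SEQUENCE pattern.  The helper name is hypothetical.  */
#if 0
static rtx
emit_insn_by_hand (pat)
     rtx pat;
{
  rtx insn = make_insn_raw (pat);

  add_insn (insn);
  return insn;
}
#endif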
/* Add INSN to the end of the doubly-linked list.
   INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE.  */

void
add_insn (insn)
     register rtx insn;
{
  PREV_INSN (insn) = last_insn;
  NEXT_INSN (insn) = 0;

  if (NULL != last_insn)
    NEXT_INSN (last_insn) = insn;

  if (NULL == first_insn)
    first_insn = insn;

  last_insn = insn;
}
/* Add INSN into the doubly-linked list after insn AFTER.  This and
   the next should be the only functions called to insert an insn once
   delay slots have been filled since only they know how to update a
   SEQUENCE.  */

void
add_insn_after (insn, after)
     rtx insn, after;
{
  rtx next = NEXT_INSN (after);

  if (optimize && INSN_DELETED_P (after))
    abort ();

  NEXT_INSN (insn) = next;
  PREV_INSN (insn) = after;

  if (next)
    {
      PREV_INSN (next) = insn;
      if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
	PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
    }
  else if (last_insn == after)
    last_insn = insn;
  else
    {
      struct sequence_stack *stack = seq_stack;
      /* Scan all pending sequences too.  */
      for (; stack; stack = stack->next)
	if (after == stack->last)
	  {
	    stack->last = insn;
	    break;
	  }

      if (stack == 0)
	abort ();
    }

  NEXT_INSN (after) = insn;
  if (GET_CODE (after) == INSN && GET_CODE (PATTERN (after)) == SEQUENCE)
    {
      rtx sequence = PATTERN (after);
      NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
    }
}

/* Add INSN into the doubly-linked list before insn BEFORE.  This and
   the previous should be the only functions called to insert an insn once
   delay slots have been filled since only they know how to update a
   SEQUENCE.  */

void
add_insn_before (insn, before)
     rtx insn, before;
{
  rtx prev = PREV_INSN (before);

  if (optimize && INSN_DELETED_P (before))
    abort ();

  PREV_INSN (insn) = prev;
  NEXT_INSN (insn) = before;

  if (prev)
    {
      NEXT_INSN (prev) = insn;
      if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
	{
	  rtx sequence = PATTERN (prev);
	  NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
	}
    }
  else if (first_insn == before)
    first_insn = insn;
  else
    {
      struct sequence_stack *stack = seq_stack;
      /* Scan all pending sequences too.  */
      for (; stack; stack = stack->next)
	if (before == stack->first)
	  {
	    stack->first = insn;
	    break;
	  }

      if (stack == 0)
	abort ();
    }

  PREV_INSN (before) = insn;
  if (GET_CODE (before) == INSN && GET_CODE (PATTERN (before)) == SEQUENCE)
    PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
}
/* Remove an insn from its doubly-linked list.  This function knows how
   to handle sequences.  */

void
remove_insn (insn)
     rtx insn;
{
  rtx next = NEXT_INSN (insn);
  rtx prev = PREV_INSN (insn);

  if (prev)
    {
      NEXT_INSN (prev) = next;
      if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
	{
	  rtx sequence = PATTERN (prev);
	  NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
	}
    }
  else if (first_insn == insn)
    first_insn = next;
  else
    {
      struct sequence_stack *stack = seq_stack;
      /* Scan all pending sequences too.  */
      for (; stack; stack = stack->next)
	if (insn == stack->first)
	  {
	    stack->first = next;
	    break;
	  }

      if (stack == 0)
	abort ();
    }

  if (next)
    {
      PREV_INSN (next) = prev;
      if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
	PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
    }
  else if (last_insn == insn)
    last_insn = prev;
  else
    {
      struct sequence_stack *stack = seq_stack;
      /* Scan all pending sequences too.  */
      for (; stack; stack = stack->next)
	if (insn == stack->last)
	  {
	    stack->last = prev;
	    break;
	  }

      if (stack == 0)
	abort ();
    }
}
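
/* Illustrative sketch only -- not part of the original source.  Because
   remove_insn and add_insn_after both maintain the SEQUENCE bookkeeping,
   relocating a single insn can be written as the pair below.  The helper
   name is hypothetical.  */
#if 0
static void
move_insn_after (insn, after)
     rtx insn, after;
{
  remove_insn (insn);
  add_insn_after (insn, after);
}
#endif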
/* Delete all insns made since FROM.
   FROM becomes the new last instruction.  */

void
delete_insns_since (from)
     rtx from;
{
  if (from == 0)
    first_insn = 0;
  else
    NEXT_INSN (from) = 0;
  last_insn = from;
}

/* This function is deprecated, please use sequences instead.

   Move a consecutive bunch of insns to a different place in the chain.
   The insns to be moved are those between FROM and TO.
   They are moved to a new position after the insn AFTER.
   AFTER must not be FROM or TO or any insn in between.

   This function does not know about SEQUENCEs and hence should not be
   called after delay-slot filling has been done.  */

void
reorder_insns (from, to, after)
     rtx from, to, after;
{
  /* Splice this bunch out of where it is now.  */
  if (PREV_INSN (from))
    NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
  if (NEXT_INSN (to))
    PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
  if (last_insn == to)
    last_insn = PREV_INSN (from);
  if (first_insn == from)
    first_insn = NEXT_INSN (to);

  /* Make the new neighbors point to it and it to them.  */
  if (NEXT_INSN (after))
    PREV_INSN (NEXT_INSN (after)) = to;

  NEXT_INSN (to) = NEXT_INSN (after);
  PREV_INSN (from) = after;
  NEXT_INSN (after) = from;
  if (after == last_insn)
    last_insn = to;
}
/* Return the line note insn preceding INSN.  */

rtx
find_line_note (insn)
     rtx insn;
{
  if (no_line_numbers)
    return 0;

  for (; insn; insn = PREV_INSN (insn))
    if (GET_CODE (insn) == NOTE
	&& NOTE_LINE_NUMBER (insn) >= 0)
      break;

  return insn;
}

/* Like reorder_insns, but inserts line notes to preserve the line numbers
   of the moved insns when debugging.  This may insert a note between AFTER
   and FROM, and another one after TO.  */

void
reorder_insns_with_line_notes (from, to, after)
     rtx from, to, after;
{
  rtx from_line = find_line_note (from);
  rtx after_line = find_line_note (after);

  reorder_insns (from, to, after);

  if (from_line == after_line)
    return;

  if (from_line)
    emit_line_note_after (NOTE_SOURCE_FILE (from_line),
			  NOTE_LINE_NUMBER (from_line),
			  after);
  if (after_line)
    emit_line_note_after (NOTE_SOURCE_FILE (after_line),
			  NOTE_LINE_NUMBER (after_line),
			  to);
}
/* Emit an insn of given code and pattern
   at a specified place within the doubly-linked list.  */

/* Make an instruction with body PATTERN
   and output it before the instruction BEFORE.  */

rtx
emit_insn_before (pattern, before)
     register rtx pattern, before;
{
  register rtx insn = before;

  if (GET_CODE (pattern) == SEQUENCE)
    {
      register int i;

      for (i = 0; i < XVECLEN (pattern, 0); i++)
	{
	  insn = XVECEXP (pattern, 0, i);
	  add_insn_before (insn, before);
	}
      if (!ggc_p && XVECLEN (pattern, 0) < SEQUENCE_RESULT_SIZE)
	sequence_result[XVECLEN (pattern, 0)] = pattern;
    }
  else
    {
      insn = make_insn_raw (pattern);
      add_insn_before (insn, before);
    }

  return insn;
}
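
/* Illustrative sketch only -- not part of the original source.  A typical
   caller builds a pattern with the gen_rtx_* constructors and hands it to
   emit_insn_before; here a register REG (hypothetical) is cleared just
   before insn WHERE, assuming the three-operand gen_rtx_SET macro of this
   era.  */
#if 0
static rtx
clear_reg_before (reg, where)
     rtx reg, where;
{
  return emit_insn_before (gen_rtx_SET (VOIDmode, reg, const0_rtx), where);
}
#endif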
/* Make an instruction with body PATTERN and code JUMP_INSN
   and output it before the instruction BEFORE.  */

rtx
emit_jump_insn_before (pattern, before)
     register rtx pattern, before;
{
  register rtx insn;

  if (GET_CODE (pattern) == SEQUENCE)
    insn = emit_insn_before (pattern, before);
  else
    {
      insn = make_jump_insn_raw (pattern);
      add_insn_before (insn, before);
    }

  return insn;
}

/* Make an instruction with body PATTERN and code CALL_INSN
   and output it before the instruction BEFORE.  */

rtx
emit_call_insn_before (pattern, before)
     register rtx pattern, before;
{
  register rtx insn;

  if (GET_CODE (pattern) == SEQUENCE)
    insn = emit_insn_before (pattern, before);
  else
    {
      insn = make_call_insn_raw (pattern);
      add_insn_before (insn, before);
      PUT_CODE (insn, CALL_INSN);
    }

  return insn;
}

/* Make an insn of code BARRIER
   and output it before the insn BEFORE.  */

rtx
emit_barrier_before (before)
     register rtx before;
{
  register rtx insn = rtx_alloc (BARRIER);

  INSN_UID (insn) = cur_insn_uid++;

  add_insn_before (insn, before);
  return insn;
}

/* Emit the label LABEL before the insn BEFORE.  */

rtx
emit_label_before (label, before)
     rtx label, before;
{
  /* This can be called twice for the same label as a result of the
     confusion that follows a syntax error!  So make it harmless.  */
  if (INSN_UID (label) == 0)
    {
      INSN_UID (label) = cur_insn_uid++;
      add_insn_before (label, before);
    }

  return label;
}

/* Emit a note of subtype SUBTYPE before the insn BEFORE.  */

rtx
emit_note_before (subtype, before)
     int subtype;
     rtx before;
{
  register rtx note = rtx_alloc (NOTE);
  INSN_UID (note) = cur_insn_uid++;
  NOTE_SOURCE_FILE (note) = 0;
  NOTE_LINE_NUMBER (note) = subtype;

  add_insn_before (note, before);
  return note;
}
/* Make an insn of code INSN with body PATTERN
   and output it after the insn AFTER.  */

rtx
emit_insn_after (pattern, after)
     register rtx pattern, after;
{
  register rtx insn = after;

  if (GET_CODE (pattern) == SEQUENCE)
    {
      register int i;

      for (i = 0; i < XVECLEN (pattern, 0); i++)
	{
	  insn = XVECEXP (pattern, 0, i);
	  add_insn_after (insn, after);
	  after = insn;
	}
      if (!ggc_p && XVECLEN (pattern, 0) < SEQUENCE_RESULT_SIZE)
	sequence_result[XVECLEN (pattern, 0)] = pattern;
    }
  else
    {
      insn = make_insn_raw (pattern);
      add_insn_after (insn, after);
    }

  return insn;
}

/* Similar to emit_insn_after, except that line notes are to be inserted so
   as to act as if this insn were at FROM.  */

void
emit_insn_after_with_line_notes (pattern, after, from)
     rtx pattern, after, from;
{
  rtx from_line = find_line_note (from);
  rtx after_line = find_line_note (after);
  rtx insn = emit_insn_after (pattern, after);

  if (from_line)
    emit_line_note_after (NOTE_SOURCE_FILE (from_line),
			  NOTE_LINE_NUMBER (from_line),
			  after);
  if (after_line)
    emit_line_note_after (NOTE_SOURCE_FILE (after_line),
			  NOTE_LINE_NUMBER (after_line),
			  insn);
}
/* Make an insn of code JUMP_INSN with body PATTERN
   and output it after the insn AFTER.  */

rtx
emit_jump_insn_after (pattern, after)
     register rtx pattern, after;
{
  register rtx insn;

  if (GET_CODE (pattern) == SEQUENCE)
    insn = emit_insn_after (pattern, after);
  else
    {
      insn = make_jump_insn_raw (pattern);
      add_insn_after (insn, after);
    }

  return insn;
}

/* Make an insn of code BARRIER
   and output it after the insn AFTER.  */

rtx
emit_barrier_after (after)
     register rtx after;
{
  register rtx insn = rtx_alloc (BARRIER);

  INSN_UID (insn) = cur_insn_uid++;

  add_insn_after (insn, after);
  return insn;
}

/* Emit the label LABEL after the insn AFTER.  */

rtx
emit_label_after (label, after)
     rtx label, after;
{
  /* This can be called twice for the same label
     as a result of the confusion that follows a syntax error!
     So make it harmless.  */
  if (INSN_UID (label) == 0)
    {
      INSN_UID (label) = cur_insn_uid++;
      add_insn_after (label, after);
    }

  return label;
}

/* Emit a note of subtype SUBTYPE after the insn AFTER.  */

rtx
emit_note_after (subtype, after)
     int subtype;
     rtx after;
{
  register rtx note = rtx_alloc (NOTE);
  INSN_UID (note) = cur_insn_uid++;
  NOTE_SOURCE_FILE (note) = 0;
  NOTE_LINE_NUMBER (note) = subtype;
  add_insn_after (note, after);
  return note;
}

/* Emit a line note for FILE and LINE after the insn AFTER.  */

rtx
emit_line_note_after (file, line, after)
     char *file;
     int line;
     rtx after;
{
  register rtx note;

  if (no_line_numbers && line > 0)
    {
      cur_insn_uid++;
      return 0;
    }

  note = rtx_alloc (NOTE);
  INSN_UID (note) = cur_insn_uid++;
  NOTE_SOURCE_FILE (note) = file;
  NOTE_LINE_NUMBER (note) = line;
  add_insn_after (note, after);
  return note;
}
/* Make an insn of code INSN with pattern PATTERN
   and add it to the end of the doubly-linked list.
   If PATTERN is a SEQUENCE, take the elements of it
   and emit an insn for each element.

   Returns the last insn emitted.  */

rtx
emit_insn (pattern)
     rtx pattern;
{
  rtx insn = last_insn;

  if (GET_CODE (pattern) == SEQUENCE)
    {
      register int i;

      for (i = 0; i < XVECLEN (pattern, 0); i++)
	{
	  insn = XVECEXP (pattern, 0, i);
	  add_insn (insn);
	}
      if (!ggc_p && XVECLEN (pattern, 0) < SEQUENCE_RESULT_SIZE)
	sequence_result[XVECLEN (pattern, 0)] = pattern;
    }
  else
    {
      insn = make_insn_raw (pattern);
      add_insn (insn);
    }

  return insn;
}

/* Emit the insns in a chain starting with INSN.
   Return the last insn emitted.  */

rtx
emit_insns (insn)
     rtx insn;
{
  rtx last = 0;

  while (insn)
    {
      rtx next = NEXT_INSN (insn);
      add_insn (insn);
      last = insn;
      insn = next;
    }

  return last;
}

/* Emit the insns in a chain starting with INSN and place them in front of
   the insn BEFORE.  Return the last insn emitted.  */

rtx
emit_insns_before (insn, before)
     rtx insn, before;
{
  rtx last = 0;

  while (insn)
    {
      rtx next = NEXT_INSN (insn);
      add_insn_before (insn, before);
      last = insn;
      insn = next;
    }

  return last;
}

/* Emit the insns in a chain starting with FIRST and place them in back of
   the insn AFTER.  Return the last insn emitted.  */

rtx
emit_insns_after (first, after)
     register rtx first;
     register rtx after;
{
  register rtx last;
  register rtx after_after;

  if (!after)
    abort ();

  if (!first)
    return first;

  for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
    continue;

  after_after = NEXT_INSN (after);

  NEXT_INSN (after) = first;
  PREV_INSN (first) = after;
  NEXT_INSN (last) = after_after;
  if (after_after)
    PREV_INSN (after_after) = last;

  if (after == last_insn)
    last_insn = last;
  return last;
}
/* Make an insn of code JUMP_INSN with pattern PATTERN
   and add it to the end of the doubly-linked list.  */

rtx
emit_jump_insn (pattern)
     register rtx pattern;
{
  if (GET_CODE (pattern) == SEQUENCE)
    return emit_insn (pattern);
  else
    {
      register rtx insn = make_jump_insn_raw (pattern);
      add_insn (insn);
      return insn;
    }
}

/* Make an insn of code CALL_INSN with pattern PATTERN
   and add it to the end of the doubly-linked list.  */

rtx
emit_call_insn (pattern)
     register rtx pattern;
{
  if (GET_CODE (pattern) == SEQUENCE)
    return emit_insn (pattern);
  else
    {
      register rtx insn = make_call_insn_raw (pattern);
      add_insn (insn);
      PUT_CODE (insn, CALL_INSN);
      return insn;
    }
}

/* Add the label LABEL to the end of the doubly-linked list.  */

rtx
emit_label (label)
     rtx label;
{
  /* This can be called twice for the same label
     as a result of the confusion that follows a syntax error!
     So make it harmless.  */
  if (INSN_UID (label) == 0)
    {
      INSN_UID (label) = cur_insn_uid++;
      add_insn (label);
    }
  return label;
}

/* Make an insn of code BARRIER
   and add it to the end of the doubly-linked list.  */

rtx
emit_barrier ()
{
  register rtx barrier = rtx_alloc (BARRIER);
  INSN_UID (barrier) = cur_insn_uid++;
  add_insn (barrier);
  return barrier;
}
/* Make an insn of code NOTE
   with data-fields specified by FILE and LINE
   and add it to the end of the doubly-linked list,
   but only if line-numbers are desired for debugging info.  */

rtx
emit_line_note (file, line)
     char *file;
     int line;
{
  set_file_and_line_for_stmt (file, line);

  if (no_line_numbers)
    return 0;

  return emit_note (file, line);
}

/* Make an insn of code NOTE
   with data-fields specified by FILE and LINE
   and add it to the end of the doubly-linked list.
   If it is a line-number NOTE, omit it if it matches the previous one.  */

rtx
emit_note (file, line)
     char *file;
     int line;
{
  register rtx note;

  if (line > 0)
    {
      if (file && last_filename && !strcmp (file, last_filename)
	  && line == last_linenum)
	return 0;
      last_filename = file;
      last_linenum = line;
    }

  if (no_line_numbers && line > 0)
    {
      cur_insn_uid++;
      return 0;
    }

  note = rtx_alloc (NOTE);
  INSN_UID (note) = cur_insn_uid++;
  NOTE_SOURCE_FILE (note) = file;
  NOTE_LINE_NUMBER (note) = line;
  add_insn (note);
  return note;
}

/* Emit a NOTE, and don't omit it even if LINE is the previous note.  */

rtx
emit_line_note_force (file, line)
     char *file;
     int line;
{
  last_linenum = -1;
  return emit_line_note (file, line);
}

/* Cause next statement to emit a line note even if the line number
   has not changed.  This is used at the beginning of a function.  */

void
force_next_line_note ()
{
  last_linenum = -1;
}
/* Place a note of KIND on insn INSN with DATUM as the datum.  If a
   note of this type already exists, remove it first.  */

void
set_unique_reg_note (insn, kind, datum)
     rtx insn;
     enum reg_note kind;
     rtx datum;
{
  rtx note = find_reg_note (insn, kind, NULL_RTX);

  /* First remove the note if there already is one.  */
  if (note)
    remove_note (insn, note);

  REG_NOTES (insn) = gen_rtx_EXPR_LIST (kind, datum, REG_NOTES (insn));
}
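
/* Illustrative sketch only -- not part of the original source.  A common use
   of set_unique_reg_note is to record the value an insn computes as a
   REG_EQUAL note, replacing any stale note of the same kind.  The helper
   name is hypothetical.  */
#if 0
static void
record_value_sketch (insn, value)
     rtx insn, value;
{
  set_unique_reg_note (insn, REG_EQUAL, value);
}
#endif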
/* Return an indication of which type of insn should have X as a body.
   The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN.  */

enum rtx_code
classify_insn (x)
     rtx x;
{
  if (GET_CODE (x) == CODE_LABEL)
    return CODE_LABEL;
  if (GET_CODE (x) == CALL)
    return CALL_INSN;
  if (GET_CODE (x) == RETURN)
    return JUMP_INSN;
  if (GET_CODE (x) == SET)
    {
      if (SET_DEST (x) == pc_rtx)
	return JUMP_INSN;
      else if (GET_CODE (SET_SRC (x)) == CALL)
	return CALL_INSN;
      else
	return INSN;
    }
  if (GET_CODE (x) == PARALLEL)
    {
      register int j;
      for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
	if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
	  return CALL_INSN;
	else if (GET_CODE (XVECEXP (x, 0, j)) == SET
		 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
	  return JUMP_INSN;
	else if (GET_CODE (XVECEXP (x, 0, j)) == SET
		 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
	  return CALL_INSN;
    }
  return INSN;
}
/* Emit the rtl pattern X as an appropriate kind of insn.
   If X is a label, it is simply added into the insn chain.  */

rtx
emit (x)
     rtx x;
{
  enum rtx_code code = classify_insn (x);

  if (code == CODE_LABEL)
    return emit_label (x);
  else if (code == INSN)
    return emit_insn (x);
  else if (code == JUMP_INSN)
    {
      register rtx insn = emit_jump_insn (x);
      if (simplejump_p (insn) || GET_CODE (x) == RETURN)
	return emit_barrier ();
      return insn;
    }
  else if (code == CALL_INSN)
    return emit_call_insn (x);
  else
    abort ();
}
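
/* Illustrative sketch only -- not part of the original source.  emit chooses
   the insn kind from the pattern itself, so a caller that does not know
   whether a generated pattern is a plain insn, a jump or a call can simply
   pass it through.  The helper name is hypothetical.  */
#if 0
static rtx
emit_whatever (pat)
     rtx pat;
{
  /* classify_insn picks INSN, JUMP_INSN, CALL_INSN or CODE_LABEL;
     emit dispatches on that result for us.  */
  return emit (pat);
}
#endif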
/* Begin emitting insns to a sequence which can be packaged in an
   RTL_EXPR.  If this sequence will contain something that might cause
   the compiler to pop arguments to function calls (because those
   pops have previously been deferred; see INHIBIT_DEFER_POP for more
   details), use do_pending_stack_adjust before calling this function.
   That will ensure that the deferred pops are not accidentally
   emitted in the middle of this sequence.  */

void
start_sequence ()
{
  struct sequence_stack *tem;

  tem = (struct sequence_stack *) xmalloc (sizeof (struct sequence_stack));

  tem->next = seq_stack;
  tem->first = first_insn;
  tem->last = last_insn;
  tem->sequence_rtl_expr = seq_rtl_expr;

  seq_stack = tem;

  first_insn = 0;
  last_insn = 0;
}

/* Similarly, but indicate that this sequence will be placed in T, an
   RTL_EXPR.  See the documentation for start_sequence for more
   information about how to use this function.  */

void
start_sequence_for_rtl_expr (t)
     tree t;
{
  start_sequence ();

  seq_rtl_expr = t;
}

/* Set up the insn chain starting with FIRST as the current sequence,
   saving the previously current one.  See the documentation for
   start_sequence for more information about how to use this function.  */

void
push_to_sequence (first)
     rtx first;
{
  rtx last;

  start_sequence ();

  for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last));

  first_insn = first;
  last_insn = last;
}

/* Set up the outer-level insn chain
   as the current sequence, saving the previously current one.  */

void
push_topmost_sequence ()
{
  struct sequence_stack *stack, *top = NULL;

  start_sequence ();

  for (stack = seq_stack; stack; stack = stack->next)
    top = stack;

  first_insn = top->first;
  last_insn = top->last;
  seq_rtl_expr = top->sequence_rtl_expr;
}
/* After emitting to the outer-level insn chain, update the outer-level
   insn chain, and restore the previous saved state.  */

void
pop_topmost_sequence ()
{
  struct sequence_stack *stack, *top = NULL;

  for (stack = seq_stack; stack; stack = stack->next)
    top = stack;

  top->first = first_insn;
  top->last = last_insn;
  /* ??? Why don't we save seq_rtl_expr here?  */

  end_sequence ();
}
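
/* Illustrative sketch only -- not part of the original source.  A caller in
   the middle of a nested sequence that needs an insn placed on the
   function's outer chain can bracket the emission with the two functions
   above.  The helper name is hypothetical.  */
#if 0
static void
emit_on_outer_chain (pat)
     rtx pat;
{
  push_topmost_sequence ();
  emit_insn (pat);
  pop_topmost_sequence ();
}
#endif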
/* After emitting to a sequence, restore previous saved state.

   To get the contents of the sequence just made, you must call
   `gen_sequence' *before* calling here.

   If the compiler might have deferred popping arguments while
   generating this sequence, and this sequence will not be immediately
   inserted into the instruction stream, use do_pending_stack_adjust
   before calling gen_sequence.  That will ensure that the deferred
   pops are inserted into this sequence, and not into some random
   location in the instruction stream.  See INHIBIT_DEFER_POP for more
   information about deferred popping of arguments.  */

void
end_sequence ()
{
  struct sequence_stack *tem = seq_stack;

  first_insn = tem->first;
  last_insn = tem->last;
  seq_rtl_expr = tem->sequence_rtl_expr;
  seq_stack = tem->next;

  free (tem);
}

/* Return 1 if currently emitting into a sequence.  */

int
in_sequence_p ()
{
  return seq_stack != 0;
}
/* Generate a SEQUENCE rtx containing the insns already emitted
   to the current sequence.

   This is how the gen_... function from a DEFINE_EXPAND
   constructs the SEQUENCE that it returns.  */

rtx
gen_sequence ()
{
  rtx result;
  rtx tem;
  int i;
  int len;

  /* Count the insns in the chain.  */
  len = 0;
  for (tem = first_insn; tem; tem = NEXT_INSN (tem))
    len++;

  /* If only one insn, return its pattern rather than a SEQUENCE.
     (Now that we cache SEQUENCE expressions, it isn't worth special-casing
     the case of an empty list.)  */
  if (len == 1
      && ! RTX_FRAME_RELATED_P (first_insn)
      && (GET_CODE (first_insn) == INSN
	  || GET_CODE (first_insn) == JUMP_INSN
	  /* Don't discard the call usage field.  */
	  || (GET_CODE (first_insn) == CALL_INSN
	      && CALL_INSN_FUNCTION_USAGE (first_insn) == NULL_RTX)))
    {
      if (!ggc_p)
	{
	  NEXT_INSN (first_insn) = free_insn;
	  free_insn = first_insn;
	}
      return PATTERN (first_insn);
    }

  /* Put them in a vector.  See if we already have a SEQUENCE of the
     appropriate length around.  */
  if (!ggc_p && len < SEQUENCE_RESULT_SIZE
      && (result = sequence_result[len]) != 0)
    sequence_result[len] = 0;
  else
    {
      /* Ensure that this rtl goes in saveable_obstack, since we may
	 cache it.  */
      push_obstacks_nochange ();
      rtl_in_saveable_obstack ();
      result = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (len));
      pop_obstacks ();
    }

  for (i = 0, tem = first_insn; tem; tem = NEXT_INSN (tem), i++)
    XVECEXP (result, 0, i) = tem;

  return result;
}
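
/* Illustrative sketch only -- not part of the original source.  The usual
   idiom around the sequence functions above: open a sequence, emit into it,
   package the result with gen_sequence *before* end_sequence, then emit the
   packaged insns wherever they belong.  The helper name is hypothetical.  */
#if 0
static rtx
make_move_seq (dest, src)
     rtx dest, src;
{
  rtx seq;

  start_sequence ();
  emit_move_insn (dest, src);
  seq = gen_sequence ();
  end_sequence ();
  return seq;		/* e.g. passed later to emit_insn_before.  */
}
#endif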
/* Put the various virtual registers into REGNO_REG_RTX.  */

void
init_virtual_regs (es)
     struct emit_status *es;
{
  rtx *ptr = es->x_regno_reg_rtx;
  ptr[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
  ptr[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
  ptr[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
  ptr[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
  ptr[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
}

void
clear_emit_caches ()
{
  int i;

  /* Clear the start_sequence/gen_sequence cache.  */
  for (i = 0; i < SEQUENCE_RESULT_SIZE; i++)
    sequence_result[i] = 0;
  free_insn = 0;
}
/* Initialize data structures and variables in this file
   before generating rtl for each function.  */

void
init_emit ()
{
  struct function *f = current_function;

  f->emit = (struct emit_status *) xmalloc (sizeof (struct emit_status));
  first_insn = NULL;
  last_insn = NULL;
  seq_rtl_expr = NULL;
  cur_insn_uid = 1;
  reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
  last_linenum = 0;
  last_filename = 0;
  first_label_num = label_num;
  last_label_num = 0;
  seq_stack = NULL;

  clear_emit_caches ();

  /* Init the tables that describe all the pseudo regs.  */

  f->emit->regno_pointer_flag_length = LAST_VIRTUAL_REGISTER + 101;

  f->emit->regno_pointer_flag
    = (char *) xcalloc (f->emit->regno_pointer_flag_length, sizeof (char));

  f->emit->regno_pointer_align
    = (char *) xcalloc (f->emit->regno_pointer_flag_length,
			sizeof (char));

  regno_reg_rtx
    = (rtx *) xcalloc (f->emit->regno_pointer_flag_length * sizeof (rtx),
		       1);

  /* Put copies of all the virtual register rtx into regno_reg_rtx.  */
  init_virtual_regs (f->emit);

  /* Indicate that the virtual registers and stack locations are
     all pointers.  */
  REGNO_POINTER_FLAG (STACK_POINTER_REGNUM) = 1;
  REGNO_POINTER_FLAG (FRAME_POINTER_REGNUM) = 1;
  REGNO_POINTER_FLAG (HARD_FRAME_POINTER_REGNUM) = 1;
  REGNO_POINTER_FLAG (ARG_POINTER_REGNUM) = 1;

  REGNO_POINTER_FLAG (VIRTUAL_INCOMING_ARGS_REGNUM) = 1;
  REGNO_POINTER_FLAG (VIRTUAL_STACK_VARS_REGNUM) = 1;
  REGNO_POINTER_FLAG (VIRTUAL_STACK_DYNAMIC_REGNUM) = 1;
  REGNO_POINTER_FLAG (VIRTUAL_OUTGOING_ARGS_REGNUM) = 1;
  REGNO_POINTER_FLAG (VIRTUAL_CFA_REGNUM) = 1;

#ifdef STACK_BOUNDARY
  REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY / BITS_PER_UNIT;
  REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY / BITS_PER_UNIT;
  REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM)
    = STACK_BOUNDARY / BITS_PER_UNIT;
  REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY / BITS_PER_UNIT;

  REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM)
    = STACK_BOUNDARY / BITS_PER_UNIT;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM)
    = STACK_BOUNDARY / BITS_PER_UNIT;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM)
    = STACK_BOUNDARY / BITS_PER_UNIT;
  REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM)
    = STACK_BOUNDARY / BITS_PER_UNIT;
  REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = UNITS_PER_WORD;
#endif

#ifdef INIT_EXPANDERS
  INIT_EXPANDERS;
#endif
}

/* Mark SS for GC.  */

static void
mark_sequence_stack (ss)
     struct sequence_stack *ss;
{
  while (ss)
    {
      ggc_mark_rtx (ss->first);
      ggc_mark_tree (ss->sequence_rtl_expr);
      ss = ss->next;
    }
}

/* Mark ES for GC.  */

void
mark_emit_status (es)
     struct emit_status *es;
{
  rtx *r;
  int i;

  if (es == 0)
    return;

  for (i = es->regno_pointer_flag_length, r = es->x_regno_reg_rtx;
       i > 0; --i, ++r)
    ggc_mark_rtx (*r);

  mark_sequence_stack (es->sequence_stack);
  ggc_mark_tree (es->sequence_rtl_expr);
  ggc_mark_rtx (es->x_first_insn);
}
/* Create some permanent unique rtl objects shared between all functions.
   LINE_NUMBERS is nonzero if line numbers are to be generated.  */

void
init_emit_once (line_numbers)
     int line_numbers;
{
  int i;
  enum machine_mode mode;
  enum machine_mode double_mode;

  no_line_numbers = ! line_numbers;

  /* Assign register numbers to the globally defined register rtx.
     This must be done at runtime because the register number field
     is in a union and some compilers can't initialize unions.  */

  pc_rtx = gen_rtx (PC, VOIDmode);
  cc0_rtx = gen_rtx (CC0, VOIDmode);
  stack_pointer_rtx = gen_rtx_raw_REG (Pmode, STACK_POINTER_REGNUM);
  frame_pointer_rtx = gen_rtx_raw_REG (Pmode, FRAME_POINTER_REGNUM);
  if (hard_frame_pointer_rtx == 0)
    hard_frame_pointer_rtx = gen_rtx_raw_REG (Pmode,
					      HARD_FRAME_POINTER_REGNUM);
  if (arg_pointer_rtx == 0)
    arg_pointer_rtx = gen_rtx_raw_REG (Pmode, ARG_POINTER_REGNUM);
  virtual_incoming_args_rtx =
    gen_rtx_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
  virtual_stack_vars_rtx =
    gen_rtx_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
  virtual_stack_dynamic_rtx =
    gen_rtx_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
  virtual_outgoing_args_rtx =
    gen_rtx_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
  virtual_cfa_rtx = gen_rtx_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);

  /* These rtx must be roots if GC is enabled.  */

  ggc_add_rtx_root (global_rtl, GR_MAX);

#ifdef INIT_EXPANDERS
  /* This is to initialize save_machine_status and restore_machine_status
     before the first call to push_function_context_to.  This is needed by
     the Chill front end which calls push_function_context_to before the
     first call to init_function_start.  */
  INIT_EXPANDERS;
#endif

  /* Compute the word and byte modes.  */

  byte_mode = VOIDmode;
  word_mode = VOIDmode;
  double_mode = VOIDmode;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
	  && byte_mode == VOIDmode)
	byte_mode = mode;

      if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
	  && word_mode == VOIDmode)
	word_mode = mode;
    }

#ifndef DOUBLE_TYPE_SIZE
#define DOUBLE_TYPE_SIZE (BITS_PER_WORD * 2)
#endif

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
	  && double_mode == VOIDmode)
	double_mode = mode;
    }

  ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);

  /* Create the unique rtx's for certain rtx codes and operand values.  */

  /* Don't use gen_rtx here since gen_rtx in this case
     tries to use these variables.  */
  for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
    const_int_rtx[i + MAX_SAVED_CONST_INT] =
      gen_rtx_raw_CONST_INT (VOIDmode, i);
  ggc_add_rtx_root (const_int_rtx, 2 * MAX_SAVED_CONST_INT + 1);

  if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
      && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
    const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
  else
    const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);

  dconst0 = REAL_VALUE_ATOF ("0", double_mode);
  dconst1 = REAL_VALUE_ATOF ("1", double_mode);
  dconst2 = REAL_VALUE_ATOF ("2", double_mode);
  dconstm1 = REAL_VALUE_ATOF ("-1", double_mode);

  for (i = 0; i <= 2; i++)
    {
      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	{
	  rtx tem = rtx_alloc (CONST_DOUBLE);
	  union real_extract u;

	  bzero ((char *) &u, sizeof u);  /* Zero any holes in a structure.  */
	  u.d = i == 0 ? dconst0 : i == 1 ? dconst1 : dconst2;

	  bcopy ((char *) &u, (char *) &CONST_DOUBLE_LOW (tem), sizeof u);
	  CONST_DOUBLE_MEM (tem) = cc0_rtx;
	  PUT_MODE (tem, mode);

	  const_tiny_rtx[i][(int) mode] = tem;
	}

      const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] = GEN_INT (i);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] = GEN_INT (i);
    }

  for (mode = CCmode; mode < MAX_MACHINE_MODE; ++mode)
    if (GET_MODE_CLASS (mode) == MODE_CC)
      const_tiny_rtx[0][(int) mode] = const0_rtx;

  ggc_add_rtx_root (&const_tiny_rtx[0][0],
		    sizeof (const_tiny_rtx) / sizeof (rtx));
  ggc_add_rtx_root (&const_true_rtx, 1);

#ifdef RETURN_ADDRESS_POINTER_REGNUM
  return_address_pointer_rtx
    = gen_rtx_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
#endif

#ifdef STRUCT_VALUE
  struct_value_rtx = STRUCT_VALUE;
#else
  struct_value_rtx = gen_rtx_REG (Pmode, STRUCT_VALUE_REGNUM);
#endif

#ifdef STRUCT_VALUE_INCOMING
  struct_value_incoming_rtx = STRUCT_VALUE_INCOMING;
#else
#ifdef STRUCT_VALUE_INCOMING_REGNUM
  struct_value_incoming_rtx
    = gen_rtx_REG (Pmode, STRUCT_VALUE_INCOMING_REGNUM);
#else
  struct_value_incoming_rtx = struct_value_rtx;
#endif
#endif

#ifdef STATIC_CHAIN_REGNUM
  static_chain_rtx = gen_rtx_REG (Pmode, STATIC_CHAIN_REGNUM);

#ifdef STATIC_CHAIN_INCOMING_REGNUM
  if (STATIC_CHAIN_INCOMING_REGNUM != STATIC_CHAIN_REGNUM)
    static_chain_incoming_rtx
      = gen_rtx_REG (Pmode, STATIC_CHAIN_INCOMING_REGNUM);
  else
#endif
    static_chain_incoming_rtx = static_chain_rtx;
#endif

#ifdef STATIC_CHAIN
  static_chain_rtx = STATIC_CHAIN;

#ifdef STATIC_CHAIN_INCOMING
  static_chain_incoming_rtx = STATIC_CHAIN_INCOMING;
#else
  static_chain_incoming_rtx = static_chain_rtx;
#endif
#endif

#ifdef PIC_OFFSET_TABLE_REGNUM
  pic_offset_table_rtx = gen_rtx_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
#endif

  ggc_add_rtx_root (&pic_offset_table_rtx, 1);
  ggc_add_rtx_root (&struct_value_rtx, 1);
  ggc_add_rtx_root (&struct_value_incoming_rtx, 1);
  ggc_add_rtx_root (&static_chain_rtx, 1);
  ggc_add_rtx_root (&static_chain_incoming_rtx, 1);
  ggc_add_rtx_root (&return_address_pointer_rtx, 1);
}
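
/* Illustrative sketch only -- not part of the original source.  The
   const_tiny_rtx table filled in above is what the CONST0_RTX, CONST1_RTX
   and CONST2_RTX macros in rtl.h index into, so after init_emit_once a
   caller can obtain the shared constants directly.  The helper name is
   hypothetical.  */
#if 0
static rtx
zero_of_mode (mode)
     enum machine_mode mode;
{
  return CONST0_RTX (mode);  /* same object as const_tiny_rtx[0][(int) mode] */
}
#endif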
/* Query and clear/restore no_line_numbers.  This is used by the
   switch / case handling in stmt.c to give proper line numbers in
   warnings about unreachable code.  */

int
force_line_numbers ()
{
  int old = no_line_numbers;

  no_line_numbers = 0;
  if (old)
    force_next_line_note ();
  return old;
}

void
restore_line_number_status (old_value)
     int old_value;
{
  no_line_numbers = old_value;
}