/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "typeclass.h"
#include "langhooks.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif

#endif
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */

/* Chain of pending expressions for PLACEHOLDER_EXPR to replace.  */
static tree placeholder_list = 0;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
  PTR constfundata;
  int reverse;
};

extern struct obstack permanent_obstack;
static rtx enqueue_insn		PARAMS ((rtx, rtx));
static unsigned HOST_WIDE_INT move_by_pieces_ninsns
				PARAMS ((unsigned HOST_WIDE_INT, unsigned int));
static void move_by_pieces_1	PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
					 struct move_by_pieces *));
static rtx clear_by_pieces_1	PARAMS ((PTR, HOST_WIDE_INT,
					 enum machine_mode));
static void clear_by_pieces	PARAMS ((rtx, unsigned HOST_WIDE_INT,
					 unsigned int));
static void store_by_pieces_1	PARAMS ((struct store_by_pieces *,
					 unsigned int));
static void store_by_pieces_2	PARAMS ((rtx (*) (rtx, ...),
					 enum machine_mode,
					 struct store_by_pieces *));
static rtx get_subtarget	PARAMS ((rtx));
static int is_zeros_p		PARAMS ((tree));
static int mostly_zeros_p	PARAMS ((tree));
static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
					     HOST_WIDE_INT, enum machine_mode,
					     tree, tree, int, int));
static void store_constructor	PARAMS ((tree, rtx, int, HOST_WIDE_INT));
static rtx store_field		PARAMS ((rtx, HOST_WIDE_INT,
					 HOST_WIDE_INT, enum machine_mode,
					 tree, enum machine_mode, int, tree,
					 int));
static rtx var_rtx		PARAMS ((tree));
static HOST_WIDE_INT highest_pow2_factor PARAMS ((tree));
static int is_aligning_offset	PARAMS ((tree, tree));
static rtx expand_increment	PARAMS ((tree, int, int));
static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
static void do_compare_and_jump	PARAMS ((tree, enum rtx_code, enum rtx_code,
					 rtx, rtx));
static rtx do_store_flag	PARAMS ((tree, rtx, enum machine_mode, int));
static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
static void do_tablejump	PARAMS ((rtx, enum machine_mode, rtx, rtx,
					 rtx));
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* If a memory-to-memory move would take MOVE_RATIO or more simple
   move-instruction sequences, we will do a movstr or libcall instead.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO (optimize_size ? 3 : 15)
#endif
#endif

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
#endif
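/* Illustrative example (not part of the original source): on a 32-bit
   target where word-sized moves are legal at the given alignment, a
   16-byte word-aligned copy costs move_by_pieces_ninsns (16, 32)
   = 16 / 4 = 4 insns, which is below the default MOVE_RATIO of 15, so
   MOVE_BY_PIECES_P is nonzero and the copy is expanded inline; with -Os
   the ratio drops to 3 and the same copy would instead go through a
   movstr pattern or a library call.  */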
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  int regno;
  rtx reg;

  start_sequence ();

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    reg = gen_rtx_REG (mode, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  end_sequence ();
}
/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));

  pending_stack_adjust = 0;
  stack_pointer_delta = 0;
  inhibit_defer_pop = 0;
  apply_args_value = 0;
}

void
mark_expr_status (p)
     struct expr_status *p;
{
  if (p == NULL)
    return;

  ggc_mark_rtx (p->x_saveregs_value);
  ggc_mark_rtx (p->x_apply_args_value);
  ggc_mark_rtx (p->x_forced_labels);
}

/* Small sanity check that the queue is empty at the end of a function.  */

void
finish_expr_for_function ()
{
  if (pending_chain)
    abort ();
}
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
				  body, pending_chain);
  return pending_chain;
}
/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */
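/* Illustrative sketch (not in the original source) of the warning above,
   with hypothetical rtx values X and Y:

       rtx x1 = protect_from_queue (x, 0);
       emit_queue ();                        -- flushes pending increments
       emit_insn (gen_move_insn (y, x1));    -- risky: the queue was
                                                flushed in between

   The safe pattern is to call protect_from_queue immediately before the
   value is placed into the instruction, after any intervening
   emit_queue.  */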
rtx
protect_from_queue (x, modify)
     rtx x;
     int modify;
{
  RTX_CODE code = GET_CODE (x);

#if 0  /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  rtx y = XEXP (x, 0);
	  rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));

	  if (QUEUED_INSN (y))
	    {
	      rtx temp = gen_reg_rtx (GET_MODE (x));

	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }

	  /* Copy the address into a pseudo, so that the returned value
	     remains correct across calls to emit_queue.  */
	  return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
	}

      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }

  /* If the increment has not happened, use the variable itself.  Copy it
     into a new pseudo so that the value remains correct across calls to
     emit_queue.  */
  if (QUEUED_INSN (x) == 0)
    return copy_to_reg (QUEUED_VAR (x));

  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);

  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

int
queued_subexp_p (x)
     rtx x;
{
  enum rtx_code code = GET_CODE (x);

  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
	      || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}
/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  rtx p;

  while ((p = pending_chain))
    {
      rtx body = QUEUED_BODY (p);

      if (GET_CODE (body) == SEQUENCE)
	{
	  QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
	  emit_insn (QUEUED_BODY (p));
	}
      else
	QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
	abort ();

      if (VECTOR_MODE_P (to_mode))
	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (to_real != from_real)
    abort ();

  if (to_real)
    {
      rtx value, insns;

      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
	{
	  /* Try converting directly if the insn is supported.  */
	  if ((code = can_extend_p (to_mode, from_mode, 0))
	      != CODE_FOR_nothing)
	    {
	      emit_unop_insn (code, to, from, UNKNOWN);
	      return;
	    }
	}
#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctqfqf2
      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncsftqf2
      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdftqf2
      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxftqf2
      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctftqf2
      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
      libcall = (rtx) 0;
      switch (from_mode)
	{
	case SFmode:
	  switch (to_mode)
	    {
	    case DFmode:
	      libcall = extendsfdf2_libfunc;
	      break;
	    case XFmode:
	      libcall = extendsfxf2_libfunc;
	      break;
	    case TFmode:
	      libcall = extendsftf2_libfunc;
	      break;
	    default:
	      break;
	    }
	  break;

	case DFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncdfsf2_libfunc;
	      break;
	    case XFmode:
	      libcall = extenddfxf2_libfunc;
	      break;
	    case TFmode:
	      libcall = extenddftf2_libfunc;
	      break;
	    default:
	      break;
	    }
	  break;

	case XFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncxfsf2_libfunc;
	      break;
	    case DFmode:
	      libcall = truncxfdf2_libfunc;
	      break;
	    default:
	      break;
	    }
	  break;

	case TFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = trunctfsf2_libfunc;
	      break;
	    case DFmode:
	      libcall = trunctfdf2_libfunc;
	      break;
	    default:
	      break;
	    }
	  break;

	default:
	  break;
	}

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
				       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value,
			  gen_rtx_FLOAT_TRUNCATE (to_mode, from));
      return;
    }
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (GET_CODE (to) == REG)
	    emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx_fmt_e (equiv_code, to_mode,
					     copy_rtx (from)));
      return;
    }
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }
  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  if (to_mode == PQImode)
    {
      if (from_mode != QImode)
	from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_truncqipqi2
      if (HAVE_truncqipqi2)
	{
	  emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncqipqi2 */
      abort ();
    }

  if (from_mode == PQImode)
    {
      if (to_mode != QImode)
	{
	  from = convert_to_mode (QImode, from, unsignedp);
	  from_mode = QImode;
	}
      else
	{
#ifdef HAVE_extendpqiqi2
	  if (HAVE_extendpqiqi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpqiqi2 */
	  abort ();
	}
    }

  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	{
	  from = convert_to_mode (SImode, from, unsignedp);
	  from_mode = SImode;
	}
      else
	{
#ifdef HAVE_extendpsisi2
	  if (! unsignedp && HAVE_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpsisi2 */
#ifdef HAVE_zero_extendpsisi2
	  if (unsignedp && HAVE_zero_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_zero_extendpsisi2 */
	  abort ();
	}
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
	from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
	{
	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
	{
	  from = convert_to_mode (DImode, from, unsignedp);
	  from_mode = DImode;
	}
      else
	{
#ifdef HAVE_extendpdidi2
	  if (HAVE_extendpdidi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpdidi2 */
	  abort ();
	}
    }
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }
  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  if (flag_force_mem)
	    from = force_not_mem (from);

	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;
	  rtx tmp;
	  tree shift_amount;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
				      - GET_MODE_BITSIZE (from_mode), 0);
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }
  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
	{
	  emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
	{
	  emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
	{
	  emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }
  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */
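/* Illustrative example (not in the original source): with a 32-bit
   HOST_WIDE_INT, converting the CONST_INT -1 to an unsigned mode twice
   that wide must produce a constant whose high-order word is zero rather
   than all ones; if the value is known to come from a narrower OLDMODE
   (say HImode), it is additionally masked down to 0x0000ffff before
   immed_double_const builds the double-word constant.  */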
  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return gen_int_mode (val, mode);
	}
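      /* Illustrative example (not in the original source): widening the
	 CONST_INT 0xff from QImode to SImode with UNSIGNEDP == 0 first
	 masks the value to the 8-bit width (0xff), sees that bit 7 is
	 set, and ORs in the high bits, yielding the SImode constant -1;
	 with UNSIGNEDP != 0 the value stays 255.  */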
      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
/* This macro is used to determine what the largest unit size that
   move_by_pieces can use is.  */

/* MOVE_MAX_PIECES is the number of bytes at a time which we can
   move efficiently, as opposed to MOVE_MAX which is the maximum
   number of bytes we can move with a single instruction.  */

#ifndef MOVE_MAX_PIECES
#define MOVE_MAX_PIECES   MOVE_MAX
#endif

/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).  The caller must pass FROM
   and TO through protect_from_queue before calling.

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum alignment we can assume.  */
void
move_by_pieces (to, from, len, align)
     rtx to, from;
     unsigned HOST_WIDE_INT len;
     unsigned int align;
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();
}
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (l, align)
     unsigned HOST_WIDE_INT l;
     unsigned int align;
{
  unsigned HOST_WIDE_INT n_insns = 0;
  unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  if (l)
    abort ();
  return n_insns;
}
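/* Illustrative example (not in the original source): on a 32-bit target
   with word-aligned operands, move_by_pieces_ninsns (10, 32) first counts
   10 / 4 = 2 SImode moves (leaving 2 bytes), then 2 / 2 = 1 HImode move,
   for a total of 3 insns.  With only byte alignment on a STRICT_ALIGNMENT
   target it would instead count 10 QImode moves.  */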
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PARAMS ((rtx, ...));
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->to)
	{
	  if (data->autinc_to)
	    to1 = adjust_automodify_address (data->to, mode, data->to_addr,
					     data->offset);
	  else
	    to1 = adjust_address (data->to, mode, data->offset);
	}

      if (data->autinc_from)
	from1 = adjust_automodify_address (data->from, mode, data->from_addr,
					   data->offset);
      else
	from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
	emit_insn ((*genfun) (to1, from1));
      else
	{
#ifdef PUSH_ROUNDING
	  emit_single_push_insn (mode, from1, NULL);
#else
	  abort ();
#endif
	}

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move (x, y, size)
     rtx x, y, size;
{
  rtx retval = 0;
#ifdef TARGET_MEM_FUNCTIONS
  static tree fn;
  tree call_expr, arg_list;
#endif
  unsigned int align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
	 including more than one in the machine description unless
	 the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align / BITS_PER_UNIT);
      enum machine_mode mode;

      /* Since this is a move insn, we don't care about volatility.  */
      volatile_ok = 1;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	{
	  enum insn_code code = movstr_optab[(int) mode];
	  insn_operand_predicate_fn pred;

	  if (code != CODE_FOR_nothing
	      /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
		 here because if SIZE is less than the mode mask, as it is
		 returned by the macro, it will definitely be less than the
		 actual mode mask.  */
	      && ((GET_CODE (size) == CONST_INT
		   && ((unsigned HOST_WIDE_INT) INTVAL (size)
		       <= (GET_MODE_MASK (mode) >> 1)))
		  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	      && ((pred = insn_data[(int) code].operand[0].predicate) == 0
		  || (*pred) (x, BLKmode))
	      && ((pred = insn_data[(int) code].operand[1].predicate) == 0
		  || (*pred) (y, BLKmode))
	      && ((pred = insn_data[(int) code].operand[3].predicate) == 0
		  || (*pred) (opalign, VOIDmode)))
	    {
	      rtx op2;
	      rtx last = get_last_insn ();
	      rtx pat;

	      op2 = convert_to_mode (mode, size, 1);
	      pred = insn_data[(int) code].operand[2].predicate;
	      if (pred != 0 && ! (*pred) (op2, mode))
		op2 = copy_to_mode_reg (mode, op2);

	      pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	      if (pat)
		{
		  emit_insn (pat);
		  volatile_ok = 0;
		  return 0;
		}
	      else
		delete_insns_since (last);
	    }
	}

      volatile_ok = 0;

      /* X, Y, or SIZE may have been passed through protect_from_queue.

	 It is unsafe to save the value generated by protect_from_queue
	 and reuse it later.  Consider what happens if emit_queue is
	 called before the return value from protect_from_queue is used.

	 Expansion of the CALL_EXPR below will call emit_queue before
	 we are finished emitting RTL for argument setup.  So if we are
	 not careful we could get the wrong value for an argument.

	 To avoid this problem we go ahead and emit code to copy X, Y &
	 SIZE into new pseudos.  We can then place those new pseudos
	 into an RTL_EXPR and use them later, even after a call to
	 emit_queue.

	 Note this is not strictly needed for library calls since they
	 do not call emit_queue before loading their arguments.  However,
	 we may need to have library calls call emit_queue in the future
	 since failing to do so could cause problems for targets which
	 define SMALL_REGISTER_CLASSES and pass arguments in registers.  */

      x = copy_to_mode_reg (Pmode, XEXP (x, 0));
      y = copy_to_mode_reg (Pmode, XEXP (y, 0));

#ifdef TARGET_MEM_FUNCTIONS
      size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
#else
      size = convert_to_mode (TYPE_MODE (integer_type_node), size,
			      TREE_UNSIGNED (integer_type_node));
      size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
#endif

#ifdef TARGET_MEM_FUNCTIONS
      /* It is incorrect to use the libcall calling conventions to call
	 memcpy in this context.

	 This could be a user call to memcpy and the user may wish to
	 examine the return value from memcpy.

	 For targets where libcalls and normal calls have different conventions
	 for returning pointers, we could end up generating incorrect code.

	 So instead of using a libcall sequence we build up a suitable
	 CALL_EXPR and expand the call in the normal fashion.  */
      if (fn == NULL_TREE)
	{
	  tree fntype;

	  /* This was copied from except.c, I don't know if all this is
	     necessary in this context or not.  */
	  fn = get_identifier ("memcpy");
	  fntype = build_pointer_type (void_type_node);
	  fntype = build_function_type (fntype, NULL_TREE);
	  fn = build_decl (FUNCTION_DECL, fn, fntype);
	  ggc_add_tree_root (&fn, 1);
	  DECL_EXTERNAL (fn) = 1;
	  TREE_PUBLIC (fn) = 1;
	  DECL_ARTIFICIAL (fn) = 1;
	  TREE_NOTHROW (fn) = 1;
	  make_decl_rtl (fn, NULL);
	  assemble_external (fn);
	}

      /* We need to make an argument list for the function call.

	 memcpy has three arguments, the first two are void * addresses and
	 the last is a size_t byte count for the copy.  */
      arg_list
	= build_tree_list (NULL_TREE,
			   make_tree (build_pointer_type (void_type_node), x));
      TREE_CHAIN (arg_list)
	= build_tree_list (NULL_TREE,
			   make_tree (build_pointer_type (void_type_node), y));
      TREE_CHAIN (TREE_CHAIN (arg_list))
	= build_tree_list (NULL_TREE, make_tree (sizetype, size));
      TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;

      /* Now we have to build up the CALL_EXPR itself.  */
      call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
			 call_expr, arg_list, NULL_TREE);
      TREE_SIDE_EFFECTS (call_expr) = 1;

      retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
#else
      emit_library_call (bcopy_libfunc, LCT_NORMAL,
			 VOIDmode, 3, y, Pmode, x, Pmode,
			 convert_to_mode (TYPE_MODE (integer_type_node), size,
					  TREE_UNSIGNED (integer_type_node)),
			 TYPE_MODE (integer_type_node));
#endif

      /* If we are initializing a readonly value, show the above call
	 clobbered it.  Otherwise, a load from it may erroneously be hoisted
	 from a loop.  */
      if (RTX_UNCHANGING_P (x))
	emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
    }

  return retval;
}
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  SIZE indicates the number
   of bytes in the object X.  */

void
move_block_from_reg (regno, x, nregs, size)
     int regno;
     rtx x;
     int nregs;
     int size;
{
  int i;
#ifdef HAVE_store_multiple
  rtx pat;
  rtx last;
#endif
  enum machine_mode mode;

  if (nregs == 0)
    return;

  /* If SIZE is that of a mode no bigger than a word, just use that
     mode's store operation.  */
  if (size <= UNITS_PER_WORD
      && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode
      && !FUNCTION_ARG_REG_LITTLE_ENDIAN)
    {
      emit_move_insn (adjust_address (x, mode, 0), gen_rtx_REG (mode, regno));
      return;
    }

  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
     to the left before storing to memory.  Note that the previous test
     doesn't handle all cases (e.g. SIZE == 3).  */
  if (size < UNITS_PER_WORD
      && BYTES_BIG_ENDIAN
      && !FUNCTION_ARG_REG_LITTLE_ENDIAN)
    {
      rtx tem = operand_subword (x, 0, 1, BLKmode);
      rtx shift;

      if (tem == 0)
	abort ();

      shift = expand_shift (LSHIFT_EXPR, word_mode,
			    gen_rtx_REG (word_mode, regno),
			    build_int_2 ((UNITS_PER_WORD - size)
					 * BITS_PER_UNIT, 0), NULL_RTX, 0);
      emit_move_insn (tem, shift);
      return;
    }

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      last = get_last_insn ();
      pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
				GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
	abort ();

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
/* Emit code to move a block SRC to a block DST, where DST is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block SRC in bytes, or -1 if not known.  */
/* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
   the balance will be in what would be the low-order memory addresses, i.e.
   left justified for big endian, right justified for little endian.  This
   happens to be true for the targets currently using this support.  If this
   ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
   would be needed.  */

void
emit_group_load (dst, orig_src, ssize)
     rtx dst, orig_src;
     int ssize;
{
  rtx *tmps, src;
  int start, i;

  if (GET_CODE (dst) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	  bytelen = ssize - bytepos;
	}

      /* If we won't be loading directly from memory, protect the real source
	 from strange tricks we might play; but make sure that the source can
	 be loaded directly into the destination.  */
      src = orig_src;
      if (GET_CODE (orig_src) != MEM
	  && (!CONSTANT_P (orig_src)
	      || (GET_MODE (orig_src) != mode
		  && GET_MODE (orig_src) != VOIDmode)))
	{
	  if (GET_MODE (orig_src) == VOIDmode)
	    src = gen_reg_rtx (mode);
	  else
	    src = gen_reg_rtx (GET_MODE (orig_src));

	  emit_move_insn (src, orig_src);
	}

      /* Optimize the access just a bit.  */
      if (GET_CODE (src) == MEM
	  && MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode)
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	{
	  tmps[i] = gen_reg_rtx (mode);
	  emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
	}
      else if (GET_CODE (src) == CONCAT)
	{
	  if ((bytepos == 0
	       && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
	      || (bytepos == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
		  && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1)))))
	    {
	      tmps[i] = XEXP (src, bytepos != 0);
	      if (! CONSTANT_P (tmps[i])
		  && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
		tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
					     0, 1, NULL_RTX, mode, mode, ssize);
	    }
	  else if (bytepos == 0)
	    {
	      rtx mem = assign_stack_temp (GET_MODE (src),
					   GET_MODE_SIZE (GET_MODE (src)), 0);
	      emit_move_insn (mem, src);
	      tmps[i] = adjust_address (mem, mode, 0);
	    }
	  else
	    abort ();
	}
      else if (CONSTANT_P (src)
	       || (GET_CODE (src) == REG && GET_MODE (src) == mode))
	tmps[i] = src;
      else
	tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
				     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
				     mode, mode, ssize);

      if (BYTES_BIG_ENDIAN && shift)
	expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
		      tmps[i], 0, OPTAB_WIDEN);
    }

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
}
/* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block DST, or -1 if not known.  */

void
emit_group_store (orig_dst, src, ssize)
     rtx orig_dst, src;
     int ssize;
{
  rtx *tmps, dst;
  int start, i;

  if (GET_CODE (src) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      tmps[i] = gen_reg_rtx (GET_MODE (reg));
      emit_move_insn (tmps[i], reg);
    }

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
	 a return statement.  In that case, the dst and src are the same,
	 so no action is necessary.  */
      if (rtx_equal_p (dst, src))
	return;

      /* It is unclear if we can ever reach here, but we may as well handle
	 it.  Allocate a temporary, and split this into a store/load to/from
	 the temporary.  */

      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
      emit_group_store (temp, src, ssize);
      emit_group_load (dst, temp, ssize);
      return;
    }
  else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
    {
      dst = gen_reg_rtx (GET_MODE (orig_dst));
      /* Make life a bit easier for combine.  */
      emit_move_insn (dst, const0_rtx);
    }

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);
      rtx dest = dst;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  if (BYTES_BIG_ENDIAN)
	    {
	      int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	      expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
			    tmps[i], 0, OPTAB_WIDEN);
	    }
	  bytelen = ssize - bytepos;
	}

      if (GET_CODE (dst) == CONCAT)
	{
	  if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
	    dest = XEXP (dst, 0);
	  else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
	    {
	      bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
	      dest = XEXP (dst, 1);
	    }
	  else
	    abort ();
	}

      /* Optimize the access just a bit.  */
      if (GET_CODE (dest) == MEM
	  && MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode)
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
      else
	store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
			 mode, tmps[i], ssize);
    }

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (GET_CODE (dst) == REG)
    emit_move_insn (orig_dst, dst);
}
/* Generate code to copy a BLKmode object of TYPE out of a
   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
   is null, a stack temporary is created.  TGTBLK is returned.

   The primary purpose of this routine is to handle functions
   that return BLKmode structures in registers.  Some machines
   (the PA for example) want to return all small structures
   in registers regardless of the structure's alignment.  */

rtx
copy_blkmode_from_reg (tgtblk, srcreg, type)
     rtx tgtblk;
     rtx srcreg;
     tree type;
{
  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
  rtx src = NULL, dst = NULL;
  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
  unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;

  if (tgtblk == 0)
    {
      tgtblk = assign_temp (build_qualified_type (type,
						  (TYPE_QUALS (type)
						   | TYPE_QUAL_CONST)),
			    0, 1, 1);
      preserve_temp_slots (tgtblk);
    }

  /* This code assumes srcreg is at least a full word.  If it isn't, copy it
     into a new pseudo which is a full word.

     If FUNCTION_ARG_REG_LITTLE_ENDIAN is set and convert_to_mode does a copy,
     the wrong part of the register gets copied so we fake a type conversion
     in place.  */
  if (GET_MODE (srcreg) != BLKmode
      && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
    {
      if (FUNCTION_ARG_REG_LITTLE_ENDIAN)
	srcreg = simplify_gen_subreg (word_mode, srcreg, GET_MODE (srcreg), 0);
      else
	srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
    }

  /* Structures whose size is not a multiple of a word are aligned
     to the least significant byte (to the right).  On a BYTES_BIG_ENDIAN
     machine, this means we must skip the empty high order bytes when
     calculating the bit offset.  */
  if (BYTES_BIG_ENDIAN
      && !FUNCTION_ARG_REG_LITTLE_ENDIAN
      && bytes % UNITS_PER_WORD)
    big_endian_correction
      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
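  /* Illustrative example (not in the original source): for a 3-byte
     structure on a big-endian 32-bit target, bytes % UNITS_PER_WORD = 3,
     so big_endian_correction = 32 - 3 * 8 = 8; the first extraction then
     starts 8 bits into the source word, skipping the unused high byte.  */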
2216 /* Copy the structure BITSIZE bites at a time.
2218 We could probably emit more efficient code for machines which do not use
2219 strict alignment, but it doesn't seem worth the effort at the current
2221 for (bitpos
= 0, xbitpos
= big_endian_correction
;
2222 bitpos
< bytes
* BITS_PER_UNIT
;
2223 bitpos
+= bitsize
, xbitpos
+= bitsize
)
2225 /* We need a new source operand each time xbitpos is on a
2226 word boundary and when xbitpos == big_endian_correction
2227 (the first time through). */
2228 if (xbitpos
% BITS_PER_WORD
== 0
2229 || xbitpos
== big_endian_correction
)
2230 src
= operand_subword_force (srcreg
, xbitpos
/ BITS_PER_WORD
,
2233 /* We need a new destination operand each time bitpos is on
2235 if (bitpos
% BITS_PER_WORD
== 0)
2236 dst
= operand_subword (tgtblk
, bitpos
/ BITS_PER_WORD
, 1, BLKmode
);
2238 /* Use xbitpos for the source extraction (right justified) and
2239 xbitpos for the destination store (left justified). */
2240 store_bit_field (dst
, bitsize
, bitpos
% BITS_PER_WORD
, word_mode
,
2241 extract_bit_field (src
, bitsize
,
2242 xbitpos
% BITS_PER_WORD
, 1,
2243 NULL_RTX
, word_mode
, word_mode
,
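/* Illustrative sketch (not part of the original source): a standalone check
   of the BIG_ENDIAN_CORRECTION arithmetic used above, assuming a 32-bit word.
   A hypothetical 5-byte structure occupies one full word plus one byte, so on
   a big-endian target the significant bits of the last word start 24 bits in.  */
#if 0
#include <stdio.h>

int
main (void)
{
  const unsigned bits_per_word = 32, bits_per_unit = 8, units_per_word = 4;
  unsigned bytes = 5;           /* hypothetical structure size in bytes */
  unsigned correction = 0;

  if (bytes % units_per_word)
    correction = bits_per_word - (bytes % units_per_word) * bits_per_unit;

  printf ("big_endian_correction = %u\n", correction);  /* prints 24 */
  return 0;
}
#endif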
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg (call_fusage, reg)
     rtx *call_fusage, reg;
{
  if (GET_CODE (reg) != REG
      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    abort ();

  *call_fusage
    = gen_rtx_EXPR_LIST (VOIDmode,
                         gen_rtx_USE (VOIDmode, reg), *call_fusage);
}

/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (call_fusage, regno, nregs)
     rtx *call_fusage;
     int regno;
     int nregs;
{
  int i;

  if (regno + nregs > FIRST_PSEUDO_REGISTER)
    abort ();

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
}

/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (call_fusage, regs)
     rtx *call_fusage;
     rtx regs;
{
  int i;

  for (i = 0; i < XVECLEN (regs, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
         registers.  This can also be a MEM for targets that pass values
         partially on the stack and partially in registers.  */
      if (reg != 0 && GET_CODE (reg) == REG)
        use_reg (call_fusage, reg);
    }
}

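/* Illustrative sketch (not part of the original source): how a call expander
   might record hard-register uses with the helpers above.  The register
   numbers are hypothetical.  */
#if 0
  rtx call_fusage = NULL_RTX;

  /* Say two arguments are passed in hard registers 4 and 5 and the static
     chain in hard register 12.  */
  use_regs (&call_fusage, 4, 2);
  use_reg (&call_fusage, gen_rtx_REG (Pmode, 12));

  /* CALL_FUSAGE would then be attached to the CALL_INSN through
     CALL_INSN_FUNCTION_USAGE so later passes know the call reads them.  */
#endif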
int
can_store_by_pieces (len, constfun, constfundata, align)
     unsigned HOST_WIDE_INT len;
     rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
     PTR constfundata;
     unsigned int align;
{
  unsigned HOST_WIDE_INT max_size, l;
  HOST_WIDE_INT offset = 0;
  enum machine_mode mode, tmode;
  enum insn_code icode;
  int reverse;
  rtx cst;

  if (! MOVE_BY_PIECES_P (len, align))
    return 0;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* We would first store what we can in the largest integer mode, then go to
     successively smaller modes.  */

  for (reverse = 0;
       reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
       reverse++)
    {
      l = len;
      mode = VOIDmode;
      max_size = MOVE_MAX_PIECES + 1;
      while (max_size > 1)
        {
          for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
               tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
            if (GET_MODE_SIZE (tmode) < max_size)
              mode = tmode;

          if (mode == VOIDmode)
            break;

          icode = mov_optab->handlers[(int) mode].insn_code;
          if (icode != CODE_FOR_nothing
              && align >= GET_MODE_ALIGNMENT (mode))
            {
              unsigned int size = GET_MODE_SIZE (mode);

              while (l >= size)
                {
                  if (reverse)
                    offset -= size;

                  cst = (*constfun) (constfundata, offset, mode);
                  if (!LEGITIMATE_CONSTANT_P (cst))
                    return 0;

                  if (!reverse)
                    offset += size;

                  l -= size;
                }
            }

          max_size = GET_MODE_SIZE (mode);
        }

      /* The code above should have handled everything.  */
      if (l != 0)
        abort ();
    }

  return 1;
}

/* Generate several move instructions to store LEN bytes generated by
   CONSTFUN to block TO.  (A MEM rtx with BLKmode).  CONSTFUNDATA is a
   pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.  */

void
store_by_pieces (to, len, constfun, constfundata, align)
     rtx to;
     unsigned HOST_WIDE_INT len;
     rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
     PTR constfundata;
     unsigned int align;
{
  struct store_by_pieces data;

  if (! MOVE_BY_PIECES_P (len, align))
    abort ();
  to = protect_from_queue (to, 1);
  data.constfun = constfun;
  data.constfundata = constfundata;
  data.len = len;
  data.to = to;
  store_by_pieces_1 (&data, align);
}

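/* Illustrative sketch (not part of the original source): a minimal CONSTFUN
   callback in the style expected by can_store_by_pieces and store_by_pieces.
   The helper name is made up, and for brevity it assembles bytes without the
   endianness handling a real callback would need.  */
#if 0
static rtx
example_constfun (data, offset, mode)
     PTR data;
     HOST_WIDE_INT offset;
     enum machine_mode mode;
{
  /* DATA is assumed to point at a buffer of bytes; pack as many of them
     as MODE needs into a CONST_INT.  */
  const unsigned char *p = (const unsigned char *) data + offset;
  unsigned HOST_WIDE_INT val = 0;
  unsigned int i;

  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    val = (val << BITS_PER_UNIT) | p[i];

  return GEN_INT (val);
}

  /* Possible use, assuming TO is a BLKmode MEM and BUF, LEN and ALIGN are
     set up so that can_store_by_pieces returned nonzero:  */
  store_by_pieces (to, len, example_constfun, (PTR) buf, align);
#endif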
/* Generate several move instructions to clear LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  The caller must pass TO through protect_from_queue
   before calling.  ALIGN is maximum alignment we can assume.  */

static void
clear_by_pieces (to, len, align)
     rtx to;
     unsigned HOST_WIDE_INT len;
     unsigned int align;
{
  struct store_by_pieces data;

  data.constfun = clear_by_pieces_1;
  data.constfundata = NULL;
  data.len = len;
  data.to = to;
  store_by_pieces_1 (&data, align);
}

/* Callback routine for clear_by_pieces.
   Return const0_rtx unconditionally.  */

static rtx
clear_by_pieces_1 (data, offset, mode)
     PTR data ATTRIBUTE_UNUSED;
     HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  return const0_rtx;
}

/* Subroutine of clear_by_pieces and store_by_pieces.
   Generate several move instructions to store LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  The caller must pass TO through protect_from_queue
   before calling.  ALIGN is maximum alignment we can assume.  */

static void
store_by_pieces_1 (data, align)
     struct store_by_pieces *data;
     unsigned int align;
{
  rtx to_addr = XEXP (data->to, 0);
  unsigned HOST_WIDE_INT max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data->offset = 0;
  data->to_addr = to_addr;
  data->autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);

  data->explicit_inc_to = 0;
  data->reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data->reverse)
    data->offset = data->len;

  /* If storing requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!data->autinc_to
      && move_by_pieces_ninsns (data->len, align) > 2)
    {
      /* Determine the main mode we'll be using.  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
        {
          data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
          data->autinc_to = 1;
          data->explicit_inc_to = -1;
        }

      if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
          && ! data->autinc_to)
        {
          data->to_addr = copy_addr_to_reg (to_addr);
          data->autinc_to = 1;
          data->explicit_inc_to = 1;
        }

      if ( !data->autinc_to && CONSTANT_P (to_addr))
        data->to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First store what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        store_by_pieces_2 (GEN_FCN (icode), mode, data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data->len != 0)
    abort ();
}

/* Subroutine of store_by_pieces_1.  Store as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
store_by_pieces_2 (genfun, mode, data)
     rtx (*genfun) PARAMS ((rtx, ...));
     enum machine_mode mode;
     struct store_by_pieces *data;
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1, cst;

  while (data->len >= size)
    {
      if (data->reverse)
        data->offset -= size;

      if (data->autinc_to)
        to1 = adjust_automodify_address (data->to, mode, data->to_addr,
                                         data->offset);
      else
        to1 = adjust_address (data->to, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr,
                                  GEN_INT (-(HOST_WIDE_INT) size)));

      cst = (*data->constfun) (data->constfundata, data->offset, mode);
      emit_insn ((*genfun) (to1, cst));

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));

      if (! data->reverse)
        data->offset += size;

      data->len -= size;
    }
}

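/* Illustrative sketch (not part of the original source): the piece sizes the
   two routines above would pick for a 7-byte store when the widest move is
   4 bytes, assuming alignment permits every width.  Plain powers of two stand
   in for the integer machine modes.  */
#if 0
#include <stdio.h>

int
main (void)
{
  unsigned len = 7, max_size = 4 + 1;   /* hypothetical MOVE_MAX_PIECES + 1 */

  while (max_size > 1)
    {
      unsigned size, piece = 0;

      /* Widest power-of-two width below MAX_SIZE stands in for the widest
         usable integer mode.  */
      for (size = 1; size < max_size; size <<= 1)
        piece = size;

      while (len >= piece)
        {
          printf ("store %u byte(s)\n", piece);  /* prints 4, then 2, then 1 */
          len -= piece;
        }
      max_size = piece;
    }
  return 0;
}
#endif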
2567 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2568 its length in bytes. */
2571 clear_storage (object
, size
)
2575 #ifdef TARGET_MEM_FUNCTIONS
2577 tree call_expr
, arg_list
;
2580 unsigned int align
= (GET_CODE (object
) == MEM
? MEM_ALIGN (object
)
2581 : GET_MODE_ALIGNMENT (GET_MODE (object
)));
2583 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2584 just move a zero. Otherwise, do this a piece at a time. */
2585 if (GET_MODE (object
) != BLKmode
2586 && GET_CODE (size
) == CONST_INT
2587 && GET_MODE_SIZE (GET_MODE (object
)) == (unsigned int) INTVAL (size
))
2588 emit_move_insn (object
, CONST0_RTX (GET_MODE (object
)));
2591 object
= protect_from_queue (object
, 1);
2592 size
= protect_from_queue (size
, 0);
2594 if (GET_CODE (size
) == CONST_INT
2595 && MOVE_BY_PIECES_P (INTVAL (size
), align
))
2596 clear_by_pieces (object
, INTVAL (size
), align
);
2599 /* Try the most limited insn first, because there's no point
2600 including more than one in the machine description unless
2601 the more limited one has some advantage. */
2603 rtx opalign
= GEN_INT (align
/ BITS_PER_UNIT
);
2604 enum machine_mode mode
;
2606 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
); mode
!= VOIDmode
;
2607 mode
= GET_MODE_WIDER_MODE (mode
))
2609 enum insn_code code
= clrstr_optab
[(int) mode
];
2610 insn_operand_predicate_fn pred
;
2612 if (code
!= CODE_FOR_nothing
2613 /* We don't need MODE to be narrower than
2614 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2615 the mode mask, as it is returned by the macro, it will
2616 definitely be less than the actual mode mask. */
2617 && ((GET_CODE (size
) == CONST_INT
2618 && ((unsigned HOST_WIDE_INT
) INTVAL (size
)
2619 <= (GET_MODE_MASK (mode
) >> 1)))
2620 || GET_MODE_BITSIZE (mode
) >= BITS_PER_WORD
)
2621 && ((pred
= insn_data
[(int) code
].operand
[0].predicate
) == 0
2622 || (*pred
) (object
, BLKmode
))
2623 && ((pred
= insn_data
[(int) code
].operand
[2].predicate
) == 0
2624 || (*pred
) (opalign
, VOIDmode
)))
2627 rtx last
= get_last_insn ();
2630 op1
= convert_to_mode (mode
, size
, 1);
2631 pred
= insn_data
[(int) code
].operand
[1].predicate
;
2632 if (pred
!= 0 && ! (*pred
) (op1
, mode
))
2633 op1
= copy_to_mode_reg (mode
, op1
);
2635 pat
= GEN_FCN ((int) code
) (object
, op1
, opalign
);
2642 delete_insns_since (last
);
2646 /* OBJECT or SIZE may have been passed through protect_from_queue.
2648 It is unsafe to save the value generated by protect_from_queue
2649 and reuse it later. Consider what happens if emit_queue is
2650 called before the return value from protect_from_queue is used.
2652 Expansion of the CALL_EXPR below will call emit_queue before
2653 we are finished emitting RTL for argument setup. So if we are
2654 not careful we could get the wrong value for an argument.
2656 To avoid this problem we go ahead and emit code to copy OBJECT
2657 and SIZE into new pseudos. We can then place those new pseudos
2658 into an RTL_EXPR and use them later, even after a call to
2661 Note this is not strictly needed for library calls since they
2662 do not call emit_queue before loading their arguments. However,
2663 we may need to have library calls call emit_queue in the future
2664 since failing to do so could cause problems for targets which
2665 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2666 object
= copy_to_mode_reg (Pmode
, XEXP (object
, 0));
2668 #ifdef TARGET_MEM_FUNCTIONS
2669 size
= copy_to_mode_reg (TYPE_MODE (sizetype
), size
);
2671 size
= convert_to_mode (TYPE_MODE (integer_type_node
), size
,
2672 TREE_UNSIGNED (integer_type_node
));
2673 size
= copy_to_mode_reg (TYPE_MODE (integer_type_node
), size
);
2676 #ifdef TARGET_MEM_FUNCTIONS
2677 /* It is incorrect to use the libcall calling conventions to call
2678 memset in this context.
2680 This could be a user call to memset and the user may wish to
2681 examine the return value from memset.
2683 For targets where libcalls and normal calls have different
2684 conventions for returning pointers, we could end up generating
2687 So instead of using a libcall sequence we build up a suitable
2688 CALL_EXPR and expand the call in the normal fashion. */
2689 if (fn
== NULL_TREE
)
2693 /* This was copied from except.c, I don't know if all this is
2694 necessary in this context or not. */
2695 fn
= get_identifier ("memset");
2696 fntype
= build_pointer_type (void_type_node
);
2697 fntype
= build_function_type (fntype
, NULL_TREE
);
2698 fn
= build_decl (FUNCTION_DECL
, fn
, fntype
);
2699 ggc_add_tree_root (&fn
, 1);
2700 DECL_EXTERNAL (fn
) = 1;
2701 TREE_PUBLIC (fn
) = 1;
2702 DECL_ARTIFICIAL (fn
) = 1;
2703 TREE_NOTHROW (fn
) = 1;
2704 make_decl_rtl (fn
, NULL
);
2705 assemble_external (fn
);
          /* We need to make an argument list for the function call.

             memset has three arguments, the first is a void * address, the
             second an integer with the initialization value, the last is a
             size_t byte count for the copy.  */
2714 = build_tree_list (NULL_TREE
,
2715 make_tree (build_pointer_type (void_type_node
),
2717 TREE_CHAIN (arg_list
)
2718 = build_tree_list (NULL_TREE
,
2719 make_tree (integer_type_node
, const0_rtx
));
2720 TREE_CHAIN (TREE_CHAIN (arg_list
))
2721 = build_tree_list (NULL_TREE
, make_tree (sizetype
, size
));
2722 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list
))) = NULL_TREE
;
2724 /* Now we have to build up the CALL_EXPR itself. */
2725 call_expr
= build1 (ADDR_EXPR
,
2726 build_pointer_type (TREE_TYPE (fn
)), fn
);
2727 call_expr
= build (CALL_EXPR
, TREE_TYPE (TREE_TYPE (fn
)),
2728 call_expr
, arg_list
, NULL_TREE
);
2729 TREE_SIDE_EFFECTS (call_expr
) = 1;
2731 retval
= expand_expr (call_expr
, NULL_RTX
, VOIDmode
, 0);
2733 emit_library_call (bzero_libfunc
, LCT_NORMAL
,
2734 VOIDmode
, 2, object
, Pmode
, size
,
2735 TYPE_MODE (integer_type_node
));
2738 /* If we are initializing a readonly value, show the above call
2739 clobbered it. Otherwise, a load from it may erroneously be
2740 hoisted from a loop. */
2741 if (RTX_UNCHANGING_P (object
))
2742 emit_insn (gen_rtx_CLOBBER (VOIDmode
, object
));
/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx
emit_move_insn (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);
  rtx y_cst = NULL_RTX;
  rtx last_insn;

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);

  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
    abort ();

  /* Never force constant_p_rtx to memory.  */
  if (GET_CODE (y) == CONSTANT_P_RTX)
    ;
  else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
    {
      y_cst = y;
      y = force_const_mem (mode, y);
    }

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (GET_CODE (x) == MEM
      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
           && ! push_operand (x, GET_MODE (x)))
          || (flag_force_addr
              && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
    x = validize_mem (x);

  if (GET_CODE (y) == MEM
      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
          || (flag_force_addr
              && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
    y = validize_mem (y);

  if (mode == BLKmode)
    abort ();

  last_insn = emit_move_insn_1 (x, y);

  if (y_cst && GET_CODE (x) == REG)
    set_unique_reg_note (last_insn, REG_EQUAL, y_cst);

  return last_insn;
}

2805 /* Low level part of emit_move_insn.
2806 Called just like emit_move_insn, but assumes X and Y
2807 are basically valid. */
2810 emit_move_insn_1 (x
, y
)
2813 enum machine_mode mode
= GET_MODE (x
);
2814 enum machine_mode submode
;
2815 enum mode_class
class = GET_MODE_CLASS (mode
);
2817 if ((unsigned int) mode
>= (unsigned int) MAX_MACHINE_MODE
)
2820 if (mov_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
)
2822 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) mode
].insn_code
) (x
, y
));
2824 /* Expand complex moves by moving real part and imag part, if possible. */
2825 else if ((class == MODE_COMPLEX_FLOAT
|| class == MODE_COMPLEX_INT
)
2826 && BLKmode
!= (submode
= mode_for_size ((GET_MODE_UNIT_SIZE (mode
)
2828 (class == MODE_COMPLEX_INT
2829 ? MODE_INT
: MODE_FLOAT
),
2831 && (mov_optab
->handlers
[(int) submode
].insn_code
2832 != CODE_FOR_nothing
))
2834 /* Don't split destination if it is a stack push. */
2835 int stack
= push_operand (x
, GET_MODE (x
));
2837 #ifdef PUSH_ROUNDING
      /* In case we output to the stack, but the size is smaller than the
         machine can push exactly, we need to use move instructions.  */
2841 && (PUSH_ROUNDING (GET_MODE_SIZE (submode
))
2842 != GET_MODE_SIZE (submode
)))
2845 HOST_WIDE_INT offset1
, offset2
;
2847 /* Do not use anti_adjust_stack, since we don't want to update
2848 stack_pointer_delta. */
2849 temp
= expand_binop (Pmode
,
2850 #ifdef STACK_GROWS_DOWNWARD
2858 (GET_MODE_SIZE (GET_MODE (x
)))),
2859 stack_pointer_rtx
, 0, OPTAB_LIB_WIDEN
);
2861 if (temp
!= stack_pointer_rtx
)
2862 emit_move_insn (stack_pointer_rtx
, temp
);
2864 #ifdef STACK_GROWS_DOWNWARD
2866 offset2
= GET_MODE_SIZE (submode
);
2868 offset1
= -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x
)));
2869 offset2
= (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x
)))
2870 + GET_MODE_SIZE (submode
));
2873 emit_move_insn (change_address (x
, submode
,
2874 gen_rtx_PLUS (Pmode
,
2876 GEN_INT (offset1
))),
2877 gen_realpart (submode
, y
));
2878 emit_move_insn (change_address (x
, submode
,
2879 gen_rtx_PLUS (Pmode
,
2881 GEN_INT (offset2
))),
2882 gen_imagpart (submode
, y
));
2886 /* If this is a stack, push the highpart first, so it
2887 will be in the argument order.
2889 In that case, change_address is used only to convert
2890 the mode, not to change the address. */
2893 /* Note that the real part always precedes the imag part in memory
2894 regardless of machine's endianness. */
2895 #ifdef STACK_GROWS_DOWNWARD
2896 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
2897 (gen_rtx_MEM (submode
, XEXP (x
, 0)),
2898 gen_imagpart (submode
, y
)));
2899 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
2900 (gen_rtx_MEM (submode
, XEXP (x
, 0)),
2901 gen_realpart (submode
, y
)));
2903 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
2904 (gen_rtx_MEM (submode
, XEXP (x
, 0)),
2905 gen_realpart (submode
, y
)));
2906 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
2907 (gen_rtx_MEM (submode
, XEXP (x
, 0)),
2908 gen_imagpart (submode
, y
)));
2913 rtx realpart_x
, realpart_y
;
2914 rtx imagpart_x
, imagpart_y
;
2916 /* If this is a complex value with each part being smaller than a
2917 word, the usual calling sequence will likely pack the pieces into
2918 a single register. Unfortunately, SUBREG of hard registers only
2919 deals in terms of words, so we have a problem converting input
2920 arguments to the CONCAT of two registers that is used elsewhere
2921 for complex values. If this is before reload, we can copy it into
2922 memory and reload. FIXME, we should see about using extract and
2923 insert on integer registers, but complex short and complex char
2924 variables should be rarely used. */
2925 if (GET_MODE_BITSIZE (mode
) < 2 * BITS_PER_WORD
2926 && (reload_in_progress
| reload_completed
) == 0)
2929 = (REG_P (x
) && REGNO (x
) < FIRST_PSEUDO_REGISTER
);
2931 = (REG_P (y
) && REGNO (y
) < FIRST_PSEUDO_REGISTER
);
2933 if (packed_dest_p
|| packed_src_p
)
2935 enum mode_class reg_class
= ((class == MODE_COMPLEX_FLOAT
)
2936 ? MODE_FLOAT
: MODE_INT
);
2938 enum machine_mode reg_mode
2939 = mode_for_size (GET_MODE_BITSIZE (mode
), reg_class
, 1);
2941 if (reg_mode
!= BLKmode
)
2943 rtx mem
= assign_stack_temp (reg_mode
,
2944 GET_MODE_SIZE (mode
), 0);
2945 rtx cmem
= adjust_address (mem
, mode
, 0);
2948 = N_("function using short complex types cannot be inline");
2952 rtx sreg
= gen_rtx_SUBREG (reg_mode
, x
, 0);
2954 emit_move_insn_1 (cmem
, y
);
2955 return emit_move_insn_1 (sreg
, mem
);
2959 rtx sreg
= gen_rtx_SUBREG (reg_mode
, y
, 0);
2961 emit_move_insn_1 (mem
, sreg
);
2962 return emit_move_insn_1 (x
, cmem
);
2968 realpart_x
= gen_realpart (submode
, x
);
2969 realpart_y
= gen_realpart (submode
, y
);
2970 imagpart_x
= gen_imagpart (submode
, x
);
2971 imagpart_y
= gen_imagpart (submode
, y
);
2973 /* Show the output dies here. This is necessary for SUBREGs
2974 of pseudos since we cannot track their lifetimes correctly;
2975 hard regs shouldn't appear here except as return values.
2976 We never want to emit such a clobber after reload. */
2978 && ! (reload_in_progress
|| reload_completed
)
2979 && (GET_CODE (realpart_x
) == SUBREG
2980 || GET_CODE (imagpart_x
) == SUBREG
))
2981 emit_insn (gen_rtx_CLOBBER (VOIDmode
, x
));
2983 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
2984 (realpart_x
, realpart_y
));
2985 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
2986 (imagpart_x
, imagpart_y
));
2989 return get_last_insn ();
2992 /* This will handle any multi-word mode that lacks a move_insn pattern.
2993 However, you will get better code if you define such patterns,
2994 even if they must turn into multiple assembler instructions. */
2995 else if (GET_MODE_SIZE (mode
) > UNITS_PER_WORD
)
3002 #ifdef PUSH_ROUNDING
3004 /* If X is a push on the stack, do the push now and replace
3005 X with a reference to the stack pointer. */
3006 if (push_operand (x
, GET_MODE (x
)))
3011 /* Do not use anti_adjust_stack, since we don't want to update
3012 stack_pointer_delta. */
3013 temp
= expand_binop (Pmode
,
3014 #ifdef STACK_GROWS_DOWNWARD
3022 (GET_MODE_SIZE (GET_MODE (x
)))),
3023 stack_pointer_rtx
, 0, OPTAB_LIB_WIDEN
);
3025 if (temp
!= stack_pointer_rtx
)
3026 emit_move_insn (stack_pointer_rtx
, temp
);
3028 code
= GET_CODE (XEXP (x
, 0));
3030 /* Just hope that small offsets off SP are OK. */
3031 if (code
== POST_INC
)
3032 temp
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
3033 GEN_INT (-((HOST_WIDE_INT
)
3034 GET_MODE_SIZE (GET_MODE (x
)))));
3035 else if (code
== POST_DEC
)
3036 temp
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
3037 GEN_INT (GET_MODE_SIZE (GET_MODE (x
))));
3039 temp
= stack_pointer_rtx
;
3041 x
= change_address (x
, VOIDmode
, temp
);
3045 /* If we are in reload, see if either operand is a MEM whose address
3046 is scheduled for replacement. */
3047 if (reload_in_progress
&& GET_CODE (x
) == MEM
3048 && (inner
= find_replacement (&XEXP (x
, 0))) != XEXP (x
, 0))
3049 x
= replace_equiv_address_nv (x
, inner
);
3050 if (reload_in_progress
&& GET_CODE (y
) == MEM
3051 && (inner
= find_replacement (&XEXP (y
, 0))) != XEXP (y
, 0))
3052 y
= replace_equiv_address_nv (y
, inner
);
3058 i
< (GET_MODE_SIZE (mode
) + (UNITS_PER_WORD
- 1)) / UNITS_PER_WORD
;
3061 rtx xpart
= operand_subword (x
, i
, 1, mode
);
3062 rtx ypart
= operand_subword (y
, i
, 1, mode
);
3064 /* If we can't get a part of Y, put Y into memory if it is a
3065 constant. Otherwise, force it into a register. If we still
3066 can't get a part of Y, abort. */
3067 if (ypart
== 0 && CONSTANT_P (y
))
3069 y
= force_const_mem (mode
, y
);
3070 ypart
= operand_subword (y
, i
, 1, mode
);
3072 else if (ypart
== 0)
3073 ypart
= operand_subword_force (y
, i
, mode
);
3075 if (xpart
== 0 || ypart
== 0)
3078 need_clobber
|= (GET_CODE (xpart
) == SUBREG
);
3080 last_insn
= emit_move_insn (xpart
, ypart
);
3083 seq
= gen_sequence ();
3086 /* Show the output dies here. This is necessary for SUBREGs
3087 of pseudos since we cannot track their lifetimes correctly;
3088 hard regs shouldn't appear here except as return values.
3089 We never want to emit such a clobber after reload. */
3091 && ! (reload_in_progress
|| reload_completed
)
3092 && need_clobber
!= 0)
3093 emit_insn (gen_rtx_CLOBBER (VOIDmode
, x
));
/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   Note that it is not possible for the value returned to be a QUEUED.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */

rtx
push_block (size, extra, below)
     rtx size;
     int extra, below;
{
  rtx temp;

  size = convert_modes (Pmode, ptr_mode, size, 1);
  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (size, extra));
  else if (GET_CODE (size) == REG && extra == 0)
    anti_adjust_stack (size);
  else
    {
      temp = copy_to_mode_reg (Pmode, size);
      if (extra != 0)
        temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
                             temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);
    }

#ifndef STACK_GROWS_DOWNWARD
  if (0)
#else
  if (1)
#endif
    {
      temp = virtual_outgoing_args_rtx;
      if (extra != 0 && below)
        temp = plus_constant (temp, extra);
    }
  else
    {
      if (GET_CODE (size) == CONST_INT)
        temp = plus_constant (virtual_outgoing_args_rtx,
                              -INTVAL (size) - (below ? 0 : extra));
      else if (extra != 0 && !below)
        temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
                             negate_rtx (Pmode, plus_constant (size, extra)));
      else
        temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
                             negate_rtx (Pmode, size));
    }

  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
}

#ifdef PUSH_ROUNDING

/* Emit single push insn.  */

static void
emit_single_push_insn (mode, x, type)
     rtx x;
     enum machine_mode mode;
     tree type;
{
  rtx dest_addr;
  unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
  rtx dest;
  enum insn_code icode;
  insn_operand_predicate_fn pred;

  stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
  /* If there is a push pattern, use it.  Otherwise try the old way of
     throwing a MEM representing the push operation to the move expander.  */
  icode = push_optab->handlers[(int) mode].insn_code;
  if (icode != CODE_FOR_nothing)
    {
      if (((pred = insn_data[(int) icode].operand[0].predicate)
           && !((*pred) (x, mode))))
        x = force_reg (mode, x);
      emit_insn (GEN_FCN (icode) (x));
      return;
    }

  if (GET_MODE_SIZE (mode) == rounded_size)
    dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
  else
    {
#ifdef STACK_GROWS_DOWNWARD
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
                                GEN_INT (-(HOST_WIDE_INT) rounded_size));
#else
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
                                GEN_INT (rounded_size));
#endif
      dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
    }

  dest = gen_rtx_MEM (mode, dest_addr);

  if (type != 0)
    {
      set_mem_attributes (dest, type, 1);

      if (flag_optimize_sibling_calls)
        /* Function incoming arguments may overlap with sibling call
           outgoing arguments and we cannot allow reordering of reads
           from function arguments with stores to outgoing arguments
           of sibling calls.  */
        set_mem_alias_set (dest, 0);
    }
  emit_move_insn (dest, x);
}
#endif

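/* Illustrative sketch (not part of the original source): the rounding that
   the code above performs on a downward-growing stack, assuming a target
   whose PUSH_ROUNDING rounds every push up to 4 bytes.  Pushing a 2-byte
   HImode value then stores it at sp - 4 and the stack pointer moves by 4.  */
#if 0
#include <stdio.h>

int
main (void)
{
  unsigned mode_size = 2;                       /* GET_MODE_SIZE (HImode) */
  unsigned rounded = (mode_size + 3) & ~3u;     /* hypothetical PUSH_ROUNDING */

  /* A PRE_MODIFY push stores the value at sp - rounded and leaves the stack
     pointer there, so the slot is padded from 2 to 4 bytes.  */
  printf ("value stored at sp-%u; sp moves by %u bytes\n", rounded, rounded);
  return 0;
}
#endif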
/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.

   ALIGN (in bits) is maximum alignment we can assume.

   If PARTIAL and REG are both nonzero, then copy that many of the first
   words of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL words,
   rounded *down* to a multiple of PARM_BOUNDARY.
   REG must be a hard register in this case.
   If REG is zero but PARTIAL is not, take all other actions for an
   argument partially in registers, but do not actually load any
   registers.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call.

   REG_PARM_STACK_SPACE is nonzero if functions require stack space
   for arguments passed in registers.  If nonzero, it will be the number
   of bytes required.  */
3253 emit_push_insn (x
, mode
, type
, size
, align
, partial
, reg
, extra
,
3254 args_addr
, args_so_far
, reg_parm_stack_space
,
3257 enum machine_mode mode
;
3266 int reg_parm_stack_space
;
3270 enum direction stack_direction
3271 #ifdef STACK_GROWS_DOWNWARD
3277 /* Decide where to pad the argument: `downward' for below,
3278 `upward' for above, or `none' for don't pad it.
3279 Default is below for small data on big-endian machines; else above. */
3280 enum direction where_pad
= FUNCTION_ARG_PADDING (mode
, type
);
3282 /* Invert direction if stack is post-decrement.
3284 if (STACK_PUSH_CODE
== POST_DEC
)
3285 if (where_pad
!= none
)
3286 where_pad
= (where_pad
== downward
? upward
: downward
);
3288 xinner
= x
= protect_from_queue (x
, 0);
3290 if (mode
== BLKmode
)
3292 /* Copy a block into the stack, entirely or partially. */
3295 int used
= partial
* UNITS_PER_WORD
;
3296 int offset
= used
% (PARM_BOUNDARY
/ BITS_PER_UNIT
);
3304 /* USED is now the # of bytes we need not copy to the stack
3305 because registers will take care of them. */
3308 xinner
= adjust_address (xinner
, BLKmode
, used
);
3310 /* If the partial register-part of the arg counts in its stack size,
3311 skip the part of stack space corresponding to the registers.
3312 Otherwise, start copying to the beginning of the stack space,
3313 by setting SKIP to 0. */
3314 skip
= (reg_parm_stack_space
== 0) ? 0 : used
;
3316 #ifdef PUSH_ROUNDING
3317 /* Do it with several push insns if that doesn't take lots of insns
3318 and if there is no difficulty with push insns that skip bytes
3319 on the stack for alignment purposes. */
3322 && GET_CODE (size
) == CONST_INT
3324 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size
) - used
, align
))
3325 /* Here we avoid the case of a structure whose weak alignment
3326 forces many pushes of a small amount of data,
3327 and such small pushes do rounding that causes trouble. */
3328 && ((! SLOW_UNALIGNED_ACCESS (word_mode
, align
))
3329 || align
>= BIGGEST_ALIGNMENT
3330 || (PUSH_ROUNDING (align
/ BITS_PER_UNIT
)
3331 == (align
/ BITS_PER_UNIT
)))
3332 && PUSH_ROUNDING (INTVAL (size
)) == INTVAL (size
))
3334 /* Push padding now if padding above and stack grows down,
3335 or if padding below and stack grows up.
3336 But if space already allocated, this has already been done. */
3337 if (extra
&& args_addr
== 0
3338 && where_pad
!= none
&& where_pad
!= stack_direction
)
3339 anti_adjust_stack (GEN_INT (extra
));
3341 move_by_pieces (NULL
, xinner
, INTVAL (size
) - used
, align
);
3344 #endif /* PUSH_ROUNDING */
3348 /* Otherwise make space on the stack and copy the data
3349 to the address of that space. */
3351 /* Deduct words put into registers from the size we must copy. */
3354 if (GET_CODE (size
) == CONST_INT
)
3355 size
= GEN_INT (INTVAL (size
) - used
);
3357 size
= expand_binop (GET_MODE (size
), sub_optab
, size
,
3358 GEN_INT (used
), NULL_RTX
, 0,
3362 /* Get the address of the stack space.
3363 In this case, we do not deal with EXTRA separately.
3364 A single stack adjust will do. */
3367 temp
= push_block (size
, extra
, where_pad
== downward
);
3370 else if (GET_CODE (args_so_far
) == CONST_INT
)
3371 temp
= memory_address (BLKmode
,
3372 plus_constant (args_addr
,
3373 skip
+ INTVAL (args_so_far
)));
3375 temp
= memory_address (BLKmode
,
3376 plus_constant (gen_rtx_PLUS (Pmode
,
3380 target
= gen_rtx_MEM (BLKmode
, temp
);
3384 set_mem_attributes (target
, type
, 1);
3385 /* Function incoming arguments may overlap with sibling call
3386 outgoing arguments and we cannot allow reordering of reads
3387 from function arguments with stores to outgoing arguments
3388 of sibling calls. */
3389 set_mem_alias_set (target
, 0);
3392 set_mem_align (target
, align
);
3394 /* TEMP is the address of the block. Copy the data there. */
3395 if (GET_CODE (size
) == CONST_INT
3396 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size
), align
))
3398 move_by_pieces (target
, xinner
, INTVAL (size
), align
);
3403 rtx opalign
= GEN_INT (align
/ BITS_PER_UNIT
);
3404 enum machine_mode mode
;
3406 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
3408 mode
= GET_MODE_WIDER_MODE (mode
))
3410 enum insn_code code
= movstr_optab
[(int) mode
];
3411 insn_operand_predicate_fn pred
;
3413 if (code
!= CODE_FOR_nothing
3414 && ((GET_CODE (size
) == CONST_INT
3415 && ((unsigned HOST_WIDE_INT
) INTVAL (size
)
3416 <= (GET_MODE_MASK (mode
) >> 1)))
3417 || GET_MODE_BITSIZE (mode
) >= BITS_PER_WORD
)
3418 && (!(pred
= insn_data
[(int) code
].operand
[0].predicate
)
3419 || ((*pred
) (target
, BLKmode
)))
3420 && (!(pred
= insn_data
[(int) code
].operand
[1].predicate
)
3421 || ((*pred
) (xinner
, BLKmode
)))
3422 && (!(pred
= insn_data
[(int) code
].operand
[3].predicate
)
3423 || ((*pred
) (opalign
, VOIDmode
))))
3425 rtx op2
= convert_to_mode (mode
, size
, 1);
3426 rtx last
= get_last_insn ();
3429 pred
= insn_data
[(int) code
].operand
[2].predicate
;
3430 if (pred
!= 0 && ! (*pred
) (op2
, mode
))
3431 op2
= copy_to_mode_reg (mode
, op2
);
3433 pat
= GEN_FCN ((int) code
) (target
, xinner
,
3441 delete_insns_since (last
);
3446 if (!ACCUMULATE_OUTGOING_ARGS
)
3448 /* If the source is referenced relative to the stack pointer,
3449 copy it to another register to stabilize it. We do not need
3450 to do this if we know that we won't be changing sp. */
3452 if (reg_mentioned_p (virtual_stack_dynamic_rtx
, temp
)
3453 || reg_mentioned_p (virtual_outgoing_args_rtx
, temp
))
3454 temp
= copy_to_reg (temp
);
3457 /* Make inhibit_defer_pop nonzero around the library call
3458 to force it to pop the bcopy-arguments right away. */
3460 #ifdef TARGET_MEM_FUNCTIONS
3461 emit_library_call (memcpy_libfunc
, LCT_NORMAL
,
3462 VOIDmode
, 3, temp
, Pmode
, XEXP (xinner
, 0), Pmode
,
3463 convert_to_mode (TYPE_MODE (sizetype
),
3464 size
, TREE_UNSIGNED (sizetype
)),
3465 TYPE_MODE (sizetype
));
3467 emit_library_call (bcopy_libfunc
, LCT_NORMAL
,
3468 VOIDmode
, 3, XEXP (xinner
, 0), Pmode
, temp
, Pmode
,
3469 convert_to_mode (TYPE_MODE (integer_type_node
),
3471 TREE_UNSIGNED (integer_type_node
)),
3472 TYPE_MODE (integer_type_node
));
3477 else if (partial
> 0)
3479 /* Scalar partly in registers. */
3481 int size
= GET_MODE_SIZE (mode
) / UNITS_PER_WORD
;
3484 /* # words of start of argument
3485 that we must make space for but need not store. */
3486 int offset
= partial
% (PARM_BOUNDARY
/ BITS_PER_WORD
);
3487 int args_offset
= INTVAL (args_so_far
);
3490 /* Push padding now if padding above and stack grows down,
3491 or if padding below and stack grows up.
3492 But if space already allocated, this has already been done. */
3493 if (extra
&& args_addr
== 0
3494 && where_pad
!= none
&& where_pad
!= stack_direction
)
3495 anti_adjust_stack (GEN_INT (extra
));
3497 /* If we make space by pushing it, we might as well push
3498 the real data. Otherwise, we can leave OFFSET nonzero
3499 and leave the space uninitialized. */
3503 /* Now NOT_STACK gets the number of words that we don't need to
3504 allocate on the stack. */
3505 not_stack
= partial
- offset
;
3507 /* If the partial register-part of the arg counts in its stack size,
3508 skip the part of stack space corresponding to the registers.
3509 Otherwise, start copying to the beginning of the stack space,
3510 by setting SKIP to 0. */
3511 skip
= (reg_parm_stack_space
== 0) ? 0 : not_stack
;
3513 if (CONSTANT_P (x
) && ! LEGITIMATE_CONSTANT_P (x
))
3514 x
= validize_mem (force_const_mem (mode
, x
));
3516 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3517 SUBREGs of such registers are not allowed. */
3518 if ((GET_CODE (x
) == REG
&& REGNO (x
) < FIRST_PSEUDO_REGISTER
3519 && GET_MODE_CLASS (GET_MODE (x
)) != MODE_INT
))
3520 x
= copy_to_reg (x
);
3522 /* Loop over all the words allocated on the stack for this arg. */
3523 /* We can do it by words, because any scalar bigger than a word
3524 has a size a multiple of a word. */
3525 #ifndef PUSH_ARGS_REVERSED
3526 for (i
= not_stack
; i
< size
; i
++)
3528 for (i
= size
- 1; i
>= not_stack
; i
--)
3530 if (i
>= not_stack
+ offset
)
3531 emit_push_insn (operand_subword_force (x
, i
, mode
),
3532 word_mode
, NULL_TREE
, NULL_RTX
, align
, 0, NULL_RTX
,
3534 GEN_INT (args_offset
+ ((i
- not_stack
+ skip
)
3536 reg_parm_stack_space
, alignment_pad
);
3541 rtx target
= NULL_RTX
;
3544 /* Push padding now if padding above and stack grows down,
3545 or if padding below and stack grows up.
3546 But if space already allocated, this has already been done. */
3547 if (extra
&& args_addr
== 0
3548 && where_pad
!= none
&& where_pad
!= stack_direction
)
3549 anti_adjust_stack (GEN_INT (extra
));
3551 #ifdef PUSH_ROUNDING
3552 if (args_addr
== 0 && PUSH_ARGS
)
3553 emit_single_push_insn (mode
, x
, type
);
3557 if (GET_CODE (args_so_far
) == CONST_INT
)
3559 = memory_address (mode
,
3560 plus_constant (args_addr
,
3561 INTVAL (args_so_far
)));
3563 addr
= memory_address (mode
, gen_rtx_PLUS (Pmode
, args_addr
,
3566 dest
= gen_rtx_MEM (mode
, addr
);
3569 set_mem_attributes (dest
, type
, 1);
3570 /* Function incoming arguments may overlap with sibling call
3571 outgoing arguments and we cannot allow reordering of reads
3572 from function arguments with stores to outgoing arguments
3573 of sibling calls. */
3574 set_mem_alias_set (dest
, 0);
3577 emit_move_insn (dest
, x
);
3583 /* If part should go in registers, copy that part
3584 into the appropriate registers. Do this now, at the end,
3585 since mem-to-mem copies above may do function calls. */
3586 if (partial
> 0 && reg
!= 0)
3588 /* Handle calls that pass values in multiple non-contiguous locations.
3589 The Irix 6 ABI has examples of this. */
3590 if (GET_CODE (reg
) == PARALLEL
)
3591 emit_group_load (reg
, x
, -1); /* ??? size? */
3593 move_block_to_reg (REGNO (reg
), x
, partial
, mode
);
3596 if (extra
&& args_addr
== 0 && where_pad
== stack_direction
)
3597 anti_adjust_stack (GEN_INT (extra
));
3599 if (alignment_pad
&& args_addr
== 0)
3600 anti_adjust_stack (alignment_pad
);
/* Return X if X can be used as a subtarget in a sequence of arithmetic
   operations.  */

static rtx
get_subtarget (x)
     rtx x;
{
  return ((x == 0
           /* Only registers can be subtargets.  */
           || GET_CODE (x) != REG
           /* If the register is readonly, it can't be set more than once.  */
           || RTX_UNCHANGING_P (x)
           /* Don't use hard regs to avoid extending their life.  */
           || REGNO (x) < FIRST_PSEUDO_REGISTER
           /* Avoid subtargets inside loops,
              since they hide some invariant expressions.  */
           || preserve_subexpressions_p ())
          ? 0 : x);
}

/* Expand an assignment that stores the value of FROM into TO.
   If WANT_VALUE is nonzero, return an rtx for the value of TO.
   (This may contain a QUEUED rtx;
   if the value is constant, this rtx is a constant.)
   Otherwise, the returned value is NULL_RTX.

   SUGGEST_REG is no longer actually used.
   It used to mean, copy the value through a register
   and return that register, if that is possible.
   We now use WANT_VALUE to decide whether to do this.  */

rtx
expand_assignment (to, from, want_value, suggest_reg)
     tree to, from;
     int want_value;
     int suggest_reg ATTRIBUTE_UNUSED;
{
  rtx to_rtx = 0;
  rtx result;
3643 /* Don't crash if the lhs of the assignment was erroneous. */
3645 if (TREE_CODE (to
) == ERROR_MARK
)
3647 result
= expand_expr (from
, NULL_RTX
, VOIDmode
, 0);
3648 return want_value
? result
: NULL_RTX
;
3651 /* Assignment of a structure component needs special treatment
3652 if the structure component's rtx is not simply a MEM.
3653 Assignment of an array element at a constant index, and assignment of
3654 an array element in an unaligned packed structure field, has the same
3657 if (TREE_CODE (to
) == COMPONENT_REF
|| TREE_CODE (to
) == BIT_FIELD_REF
3658 || TREE_CODE (to
) == ARRAY_REF
|| TREE_CODE (to
) == ARRAY_RANGE_REF
)
3660 enum machine_mode mode1
;
3661 HOST_WIDE_INT bitsize
, bitpos
;
3669 tem
= get_inner_reference (to
, &bitsize
, &bitpos
, &offset
, &mode1
,
3670 &unsignedp
, &volatilep
);
3672 /* If we are going to use store_bit_field and extract_bit_field,
3673 make sure to_rtx will be safe for multiple use. */
3675 if (mode1
== VOIDmode
&& want_value
)
3676 tem
= stabilize_reference (tem
);
3678 orig_to_rtx
= to_rtx
= expand_expr (tem
, NULL_RTX
, VOIDmode
, 0);
3682 rtx offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, EXPAND_SUM
);
3684 if (GET_CODE (to_rtx
) != MEM
)
3687 if (GET_MODE (offset_rtx
) != ptr_mode
)
3688 offset_rtx
= convert_to_mode (ptr_mode
, offset_rtx
, 0);
3690 #ifdef POINTERS_EXTEND_UNSIGNED
3691 if (GET_MODE (offset_rtx
) != Pmode
)
3692 offset_rtx
= convert_memory_address (Pmode
, offset_rtx
);
3695 /* A constant address in TO_RTX can have VOIDmode, we must not try
3696 to call force_reg for that case. Avoid that case. */
3697 if (GET_CODE (to_rtx
) == MEM
3698 && GET_MODE (to_rtx
) == BLKmode
3699 && GET_MODE (XEXP (to_rtx
, 0)) != VOIDmode
3701 && (bitpos
% bitsize
) == 0
3702 && (bitsize
% GET_MODE_ALIGNMENT (mode1
)) == 0
3703 && MEM_ALIGN (to_rtx
) == GET_MODE_ALIGNMENT (mode1
))
3705 to_rtx
= adjust_address (to_rtx
, mode1
, bitpos
/ BITS_PER_UNIT
);
3709 to_rtx
= offset_address (to_rtx
, offset_rtx
,
3710 highest_pow2_factor (offset
));
3713 if (GET_CODE (to_rtx
) == MEM
)
3715 tree old_expr
= MEM_EXPR (to_rtx
);
3717 /* If the field is at offset zero, we could have been given the
3718 DECL_RTX of the parent struct. Don't munge it. */
3719 to_rtx
= shallow_copy_rtx (to_rtx
);
3721 set_mem_attributes (to_rtx
, to
, 0);
3723 /* If we changed MEM_EXPR, that means we're now referencing
3724 the COMPONENT_REF, which means that MEM_OFFSET must be
3725 relative to that field. But we've not yet reflected BITPOS
3726 in TO_RTX. This will be done in store_field. Adjust for
3727 that by biasing MEM_OFFSET by -bitpos. */
3728 if (MEM_EXPR (to_rtx
) != old_expr
&& MEM_OFFSET (to_rtx
)
3729 && (bitpos
/ BITS_PER_UNIT
) != 0)
3730 set_mem_offset (to_rtx
, GEN_INT (INTVAL (MEM_OFFSET (to_rtx
))
3731 - (bitpos
/ BITS_PER_UNIT
)));
3734 /* Deal with volatile and readonly fields. The former is only done
3735 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3736 if (volatilep
&& GET_CODE (to_rtx
) == MEM
)
3738 if (to_rtx
== orig_to_rtx
)
3739 to_rtx
= copy_rtx (to_rtx
);
3740 MEM_VOLATILE_P (to_rtx
) = 1;
3743 if (TREE_CODE (to
) == COMPONENT_REF
3744 && TREE_READONLY (TREE_OPERAND (to
, 1)))
3746 if (to_rtx
== orig_to_rtx
)
3747 to_rtx
= copy_rtx (to_rtx
);
3748 RTX_UNCHANGING_P (to_rtx
) = 1;
3751 if (GET_CODE (to_rtx
) == MEM
&& ! can_address_p (to
))
3753 if (to_rtx
== orig_to_rtx
)
3754 to_rtx
= copy_rtx (to_rtx
);
3755 MEM_KEEP_ALIAS_SET_P (to_rtx
) = 1;
3758 result
= store_field (to_rtx
, bitsize
, bitpos
, mode1
, from
,
3760 /* Spurious cast for HPUX compiler. */
3761 ? ((enum machine_mode
)
3762 TYPE_MODE (TREE_TYPE (to
)))
3764 unsignedp
, TREE_TYPE (tem
), get_alias_set (to
));
3766 preserve_temp_slots (result
);
3770 /* If the value is meaningful, convert RESULT to the proper mode.
3771 Otherwise, return nothing. */
3772 return (want_value
? convert_modes (TYPE_MODE (TREE_TYPE (to
)),
3773 TYPE_MODE (TREE_TYPE (from
)),
3775 TREE_UNSIGNED (TREE_TYPE (to
)))
3779 /* If the rhs is a function call and its value is not an aggregate,
3780 call the function before we start to compute the lhs.
3781 This is needed for correct code for cases such as
3782 val = setjmp (buf) on machines where reference to val
3783 requires loading up part of an address in a separate insn.
3785 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3786 since it might be a promoted variable where the zero- or sign- extension
3787 needs to be done. Handling this in the normal way is safe because no
3788 computation is done before the call. */
3789 if (TREE_CODE (from
) == CALL_EXPR
&& ! aggregate_value_p (from
)
3790 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from
))) == INTEGER_CST
3791 && ! ((TREE_CODE (to
) == VAR_DECL
|| TREE_CODE (to
) == PARM_DECL
)
3792 && GET_CODE (DECL_RTL (to
)) == REG
))
3797 value
= expand_expr (from
, NULL_RTX
, VOIDmode
, 0);
3799 to_rtx
= expand_expr (to
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
3801 /* Handle calls that return values in multiple non-contiguous locations.
3802 The Irix 6 ABI has examples of this. */
3803 if (GET_CODE (to_rtx
) == PARALLEL
)
3804 emit_group_load (to_rtx
, value
, int_size_in_bytes (TREE_TYPE (from
)));
3805 else if (GET_MODE (to_rtx
) == BLKmode
)
3806 emit_block_move (to_rtx
, value
, expr_size (from
));
3809 #ifdef POINTERS_EXTEND_UNSIGNED
3810 if (POINTER_TYPE_P (TREE_TYPE (to
))
3811 && GET_MODE (to_rtx
) != GET_MODE (value
))
3812 value
= convert_memory_address (GET_MODE (to_rtx
), value
);
3814 emit_move_insn (to_rtx
, value
);
3816 preserve_temp_slots (to_rtx
);
3819 return want_value
? to_rtx
: NULL_RTX
;
3822 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3823 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3826 to_rtx
= expand_expr (to
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
3828 /* Don't move directly into a return register. */
3829 if (TREE_CODE (to
) == RESULT_DECL
3830 && (GET_CODE (to_rtx
) == REG
|| GET_CODE (to_rtx
) == PARALLEL
))
3835 temp
= expand_expr (from
, 0, GET_MODE (to_rtx
), 0);
3837 if (GET_CODE (to_rtx
) == PARALLEL
)
3838 emit_group_load (to_rtx
, temp
, int_size_in_bytes (TREE_TYPE (from
)));
3840 emit_move_insn (to_rtx
, temp
);
3842 preserve_temp_slots (to_rtx
);
3845 return want_value
? to_rtx
: NULL_RTX
;
3848 /* In case we are returning the contents of an object which overlaps
3849 the place the value is being stored, use a safe function when copying
3850 a value through a pointer into a structure value return block. */
3851 if (TREE_CODE (to
) == RESULT_DECL
&& TREE_CODE (from
) == INDIRECT_REF
3852 && current_function_returns_struct
3853 && !current_function_returns_pcc_struct
)
3858 size
= expr_size (from
);
3859 from_rtx
= expand_expr (from
, NULL_RTX
, VOIDmode
, 0);
3861 #ifdef TARGET_MEM_FUNCTIONS
3862 emit_library_call (memmove_libfunc
, LCT_NORMAL
,
3863 VOIDmode
, 3, XEXP (to_rtx
, 0), Pmode
,
3864 XEXP (from_rtx
, 0), Pmode
,
3865 convert_to_mode (TYPE_MODE (sizetype
),
3866 size
, TREE_UNSIGNED (sizetype
)),
3867 TYPE_MODE (sizetype
));
3869 emit_library_call (bcopy_libfunc
, LCT_NORMAL
,
3870 VOIDmode
, 3, XEXP (from_rtx
, 0), Pmode
,
3871 XEXP (to_rtx
, 0), Pmode
,
3872 convert_to_mode (TYPE_MODE (integer_type_node
),
3873 size
, TREE_UNSIGNED (integer_type_node
)),
3874 TYPE_MODE (integer_type_node
));
3877 preserve_temp_slots (to_rtx
);
3880 return want_value
? to_rtx
: NULL_RTX
;
3883 /* Compute FROM and store the value in the rtx we got. */
3886 result
= store_expr (from
, to_rtx
, want_value
);
3887 preserve_temp_slots (result
);
3890 return want_value
? result
: NULL_RTX
;
/* Generate code for computing expression EXP,
   and storing the value into TARGET.
   TARGET may contain a QUEUED rtx.

   If WANT_VALUE is nonzero, return a copy of the value
   not in TARGET, so that we can be sure to use the proper
   value in a containing expression even if TARGET has something
   else stored in it.  If possible, we copy the value through a pseudo
   and return that pseudo.  Or, if the value is constant, we try to
   return the constant.  In some cases, we return a pseudo
   copied *from* TARGET.

   If the mode is BLKmode then we may return TARGET itself.
   It turns out that in BLKmode it doesn't cause a problem,
   because C has no operators that could combine two different
   assignments into the same BLKmode object with different values
   with no sequence point.  Will other languages need this to
   be declared invalid as well?

   If WANT_VALUE is 0, we return NULL, to make sure
   to catch quickly any cases where the caller uses the value
   and fails to set WANT_VALUE.  */

rtx
store_expr (exp, target, want_value)
     tree exp;
     rtx target;
     int want_value;
{
  rtx temp;
  int dont_return_target = 0;
  int dont_store_target = 0;
3926 if (TREE_CODE (exp
) == COMPOUND_EXPR
)
3928 /* Perform first part of compound expression, then assign from second
3930 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, 0);
3932 return store_expr (TREE_OPERAND (exp
, 1), target
, want_value
);
3934 else if (TREE_CODE (exp
) == COND_EXPR
&& GET_MODE (target
) == BLKmode
)
3936 /* For conditional expression, get safe form of the target. Then
3937 test the condition, doing the appropriate assignment on either
3938 side. This avoids the creation of unnecessary temporaries.
3939 For non-BLKmode, it is more efficient not to do this. */
3941 rtx lab1
= gen_label_rtx (), lab2
= gen_label_rtx ();
3944 target
= protect_from_queue (target
, 1);
3946 do_pending_stack_adjust ();
3948 jumpifnot (TREE_OPERAND (exp
, 0), lab1
);
3949 start_cleanup_deferral ();
3950 store_expr (TREE_OPERAND (exp
, 1), target
, 0);
3951 end_cleanup_deferral ();
3953 emit_jump_insn (gen_jump (lab2
));
3956 start_cleanup_deferral ();
3957 store_expr (TREE_OPERAND (exp
, 2), target
, 0);
3958 end_cleanup_deferral ();
3963 return want_value
? target
: NULL_RTX
;
3965 else if (queued_subexp_p (target
))
3966 /* If target contains a postincrement, let's not risk
3967 using it as the place to generate the rhs. */
3969 if (GET_MODE (target
) != BLKmode
&& GET_MODE (target
) != VOIDmode
)
3971 /* Expand EXP into a new pseudo. */
3972 temp
= gen_reg_rtx (GET_MODE (target
));
3973 temp
= expand_expr (exp
, temp
, GET_MODE (target
), 0);
3976 temp
= expand_expr (exp
, NULL_RTX
, GET_MODE (target
), 0);
3978 /* If target is volatile, ANSI requires accessing the value
3979 *from* the target, if it is accessed. So make that happen.
3980 In no case return the target itself. */
3981 if (! MEM_VOLATILE_P (target
) && want_value
)
3982 dont_return_target
= 1;
3984 else if (want_value
&& GET_CODE (target
) == MEM
&& ! MEM_VOLATILE_P (target
)
3985 && GET_MODE (target
) != BLKmode
)
3986 /* If target is in memory and caller wants value in a register instead,
3987 arrange that. Pass TARGET as target for expand_expr so that,
3988 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3989 We know expand_expr will not use the target in that case.
3990 Don't do this if TARGET is volatile because we are supposed
3991 to write it and then read it. */
3993 temp
= expand_expr (exp
, target
, GET_MODE (target
), 0);
3994 if (GET_MODE (temp
) != BLKmode
&& GET_MODE (temp
) != VOIDmode
)
3996 /* If TEMP is already in the desired TARGET, only copy it from
3997 memory and don't store it there again. */
3999 || (rtx_equal_p (temp
, target
)
4000 && ! side_effects_p (temp
) && ! side_effects_p (target
)))
4001 dont_store_target
= 1;
4002 temp
= copy_to_reg (temp
);
4004 dont_return_target
= 1;
4006 else if (GET_CODE (target
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (target
))
    /* If this is a scalar in a register that is stored in a wider mode
       than the declared mode, compute the result into its declared mode
       and then convert to the wider mode.  Our value is the computed
       expression.  */
    {
      rtx inner_target = 0;
4014 /* If we don't want a value, we can do the conversion inside EXP,
4015 which will often result in some optimizations. Do the conversion
4016 in two steps: first change the signedness, if needed, then
4017 the extend. But don't do this if the type of EXP is a subtype
4018 of something else since then the conversion might involve
4019 more than just converting modes. */
4020 if (! want_value
&& INTEGRAL_TYPE_P (TREE_TYPE (exp
))
4021 && TREE_TYPE (TREE_TYPE (exp
)) == 0)
4023 if (TREE_UNSIGNED (TREE_TYPE (exp
))
4024 != SUBREG_PROMOTED_UNSIGNED_P (target
))
4026 ((*lang_hooks
.types
.signed_or_unsigned_type
)
4027 (SUBREG_PROMOTED_UNSIGNED_P (target
), TREE_TYPE (exp
)), exp
);
4029 exp
= convert ((*lang_hooks
.types
.type_for_mode
)
4030 (GET_MODE (SUBREG_REG (target
)),
4031 SUBREG_PROMOTED_UNSIGNED_P (target
)),
4034 inner_target
= SUBREG_REG (target
);
4037 temp
= expand_expr (exp
, inner_target
, VOIDmode
, 0);
4039 /* If TEMP is a volatile MEM and we want a result value, make
4040 the access now so it gets done only once. Likewise if
4041 it contains TARGET. */
4042 if (GET_CODE (temp
) == MEM
&& want_value
4043 && (MEM_VOLATILE_P (temp
)
4044 || reg_mentioned_p (SUBREG_REG (target
), XEXP (temp
, 0))))
4045 temp
= copy_to_reg (temp
);
4047 /* If TEMP is a VOIDmode constant, use convert_modes to make
4048 sure that we properly convert it. */
4049 if (CONSTANT_P (temp
) && GET_MODE (temp
) == VOIDmode
)
4051 temp
= convert_modes (GET_MODE (target
), TYPE_MODE (TREE_TYPE (exp
)),
4052 temp
, SUBREG_PROMOTED_UNSIGNED_P (target
));
4053 temp
= convert_modes (GET_MODE (SUBREG_REG (target
)),
4054 GET_MODE (target
), temp
,
4055 SUBREG_PROMOTED_UNSIGNED_P (target
));
4058 convert_move (SUBREG_REG (target
), temp
,
4059 SUBREG_PROMOTED_UNSIGNED_P (target
));
4061 /* If we promoted a constant, change the mode back down to match
4062 target. Otherwise, the caller might get confused by a result whose
4063 mode is larger than expected. */
4065 if (want_value
&& GET_MODE (temp
) != GET_MODE (target
))
4067 if (GET_MODE (temp
) != VOIDmode
)
4069 temp
= gen_lowpart_SUBREG (GET_MODE (target
), temp
);
4070 SUBREG_PROMOTED_VAR_P (temp
) = 1;
4071 SUBREG_PROMOTED_UNSIGNED_SET (temp
,
4072 SUBREG_PROMOTED_UNSIGNED_P (target
));
4075 temp
= convert_modes (GET_MODE (target
),
4076 GET_MODE (SUBREG_REG (target
)),
4077 temp
, SUBREG_PROMOTED_UNSIGNED_P (target
));
4080 return want_value
? temp
: NULL_RTX
;
4084 temp
= expand_expr (exp
, target
, GET_MODE (target
), 0);
4085 /* Return TARGET if it's a specified hardware register.
4086 If TARGET is a volatile mem ref, either return TARGET
4087 or return a reg copied *from* TARGET; ANSI requires this.
4089 Otherwise, if TEMP is not TARGET, return TEMP
4090 if it is constant (for efficiency),
4091 or if we really want the correct value. */
4092 if (!(target
&& GET_CODE (target
) == REG
4093 && REGNO (target
) < FIRST_PSEUDO_REGISTER
)
4094 && !(GET_CODE (target
) == MEM
&& MEM_VOLATILE_P (target
))
4095 && ! rtx_equal_p (temp
, target
)
4096 && (CONSTANT_P (temp
) || want_value
))
4097 dont_return_target
= 1;
4100 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4101 the same as that of TARGET, adjust the constant. This is needed, for
4102 example, in case it is a CONST_DOUBLE and we want only a word-sized
4104 if (CONSTANT_P (temp
) && GET_MODE (temp
) == VOIDmode
4105 && TREE_CODE (exp
) != ERROR_MARK
4106 && GET_MODE (target
) != TYPE_MODE (TREE_TYPE (exp
)))
4107 temp
= convert_modes (GET_MODE (target
), TYPE_MODE (TREE_TYPE (exp
)),
4108 temp
, TREE_UNSIGNED (TREE_TYPE (exp
)));
4110 /* If value was not generated in the target, store it there.
4111 Convert the value to TARGET's type first if necessary.
4112 If TEMP and TARGET compare equal according to rtx_equal_p, but
4113 one or both of them are volatile memory refs, we have to distinguish
4115 - expand_expr has used TARGET. In this case, we must not generate
4116 another copy. This can be detected by TARGET being equal according
4118 - expand_expr has not used TARGET - that means that the source just
4119 happens to have the same RTX form. Since temp will have been created
4120 by expand_expr, it will compare unequal according to == .
4121 We must generate a copy in this case, to reach the correct number
4122 of volatile memory references. */
4124 if ((! rtx_equal_p (temp
, target
)
4125 || (temp
!= target
&& (side_effects_p (temp
)
4126 || side_effects_p (target
))))
4127 && TREE_CODE (exp
) != ERROR_MARK
4128 && ! dont_store_target
)
4130 target
= protect_from_queue (target
, 1);
4131 if (GET_MODE (temp
) != GET_MODE (target
)
4132 && GET_MODE (temp
) != VOIDmode
)
4134 int unsignedp
= TREE_UNSIGNED (TREE_TYPE (exp
));
4135 if (dont_return_target
)
4137 /* In this case, we will return TEMP,
4138 so make sure it has the proper mode.
4139 But don't forget to store the value into TARGET. */
4140 temp
= convert_to_mode (GET_MODE (target
), temp
, unsignedp
);
4141 emit_move_insn (target
, temp
);
4144 convert_move (target
, temp
, unsignedp
);
      else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
        {
          /* Handle copying a string constant into an array.  The string
             constant may be shorter than the array.  So copy just the string's
             actual length, and clear the rest.  First get the size of the data
             type of the string, which is actually the size of the target.  */
          rtx size = expr_size (exp);

          if (GET_CODE (size) == CONST_INT
              && INTVAL (size) < TREE_STRING_LENGTH (exp))
            emit_block_move (target, temp, size);
          else
            {
              /* Compute the size of the data to copy from the string.  */
              tree copy_size
                = size_binop (MIN_EXPR,
                              make_tree (sizetype, size),
                              size_int (TREE_STRING_LENGTH (exp)));
              rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
                                               VOIDmode, 0);
              rtx label = 0;

              /* Copy that much.  */
              copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx, 0);
              emit_block_move (target, temp, copy_size_rtx);
              /* Figure out how much is left in TARGET that we have to clear.
                 Do all calculations in ptr_mode.  */
              if (GET_CODE (copy_size_rtx) == CONST_INT)
                {
                  size = plus_constant (size, -INTVAL (copy_size_rtx));
                  target = adjust_address (target, BLKmode,
                                           INTVAL (copy_size_rtx));
                }
              else
                {
                  size = expand_binop (ptr_mode, sub_optab, size,
                                       copy_size_rtx, NULL_RTX, 0,
                                       OPTAB_LIB_WIDEN);

#ifdef POINTERS_EXTEND_UNSIGNED
                  if (GET_MODE (copy_size_rtx) != Pmode)
                    copy_size_rtx = convert_memory_address (Pmode,
                                                            copy_size_rtx);
#endif

                  target = offset_address (target, copy_size_rtx,
                                           highest_pow2_factor (copy_size));
                  label = gen_label_rtx ();
                  emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
                                           GET_MODE (size), 0, label);
                }

              if (size != const0_rtx)
                clear_storage (target, size);

              if (label)
                emit_label (label);
            }
        }
      /* Handle calls that return values in multiple non-contiguous locations.
         The Irix 6 ABI has examples of this.  */
      else if (GET_CODE (target) == PARALLEL)
        emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)));
      else if (GET_MODE (temp) == BLKmode)
        emit_block_move (target, temp, expr_size (exp));
      else
        emit_move_insn (target, temp);
    }
  /* If we don't want a value, return NULL_RTX.  */
  if (! want_value)
    return NULL_RTX;

  /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
     ??? The latter test doesn't seem to make sense.  */
  else if (dont_return_target && GET_CODE (temp) != MEM)
    return temp;

  /* Return TARGET itself if it is a hard register.  */
  else if (want_value && GET_MODE (target) != BLKmode
           && ! (GET_CODE (target) == REG
                 && REGNO (target) < FIRST_PSEUDO_REGISTER))
    return copy_to_reg (target);

  else
    return target;
}
/* Return 1 if EXP just contains zeros.  */

static int
is_zeros_p (exp)
     tree exp;
{
  tree elt;

  switch (TREE_CODE (exp))
    {
    case CONVERT_EXPR:
    case NOP_EXPR:
    case NON_LVALUE_EXPR:
    case VIEW_CONVERT_EXPR:
      return is_zeros_p (TREE_OPERAND (exp, 0));

    case INTEGER_CST:
      return integer_zerop (exp);

    case COMPLEX_CST:
      return
        is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));

    case REAL_CST:
      return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);

    case VECTOR_CST:
      for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
           elt = TREE_CHAIN (elt))
        if (!is_zeros_p (TREE_VALUE (elt)))
          return 0;

      return 1;

    case CONSTRUCTOR:
      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
        return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
      for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
        if (! is_zeros_p (TREE_VALUE (elt)))
          return 0;

      return 1;

    default:
      return 0;
    }
}
/* Return 1 if EXP contains mostly (3/4) zeros.  */

static int
mostly_zeros_p (exp)
     tree exp;
{
  if (TREE_CODE (exp) == CONSTRUCTOR)
    {
      int elts = 0, zeros = 0;
      tree elt = CONSTRUCTOR_ELTS (exp);
      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
        {
          /* If there are no ranges of true bits, it is all zero.  */
          return elt == NULL_TREE;
        }
      for (; elt; elt = TREE_CHAIN (elt))
        {
          /* We do not handle the case where the index is a RANGE_EXPR,
             so the statistic will be somewhat inaccurate.
             We do make a more accurate count in store_constructor itself,
             so since this function is only used for nested array elements,
             this should be close enough.  */
          if (mostly_zeros_p (TREE_VALUE (elt)))
            zeros++;
          elts++;
        }

      return 4 * zeros >= 3 * elts;
    }

  return is_zeros_p (exp);
}
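/* Worked example of the 3/4 test above: for an initializer such as
   { 0, 0, 0, 5 } we count elts = 4 and zeros = 3, and 4 * 3 >= 3 * 4
   holds, so the constructor is considered mostly zero and callers
   prefer to clear the whole object first and store only the nonzero
   elements.  */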
/* Helper function for store_constructor.
   TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
   TYPE is the type of the CONSTRUCTOR, not the element type.
   CLEARED is as for store_constructor.
   ALIAS_SET is the alias set to use for any stores.

   This provides a recursive shortcut back to store_constructor when it isn't
   necessary to go through store_field.  This is so that we can pass through
   the cleared field to let store_constructor know that we may not have to
   clear a substructure if the outer structure has already been cleared.  */
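/* For example, when storing the inner { 0, 0 } of a nested initializer
   into a MEM target whose enclosing structure was already cleared, the
   CONSTRUCTOR/bitpos test below sends us back to store_constructor with
   CLEARED still set, so the inner zeros need not be stored again;
   anything else falls through to store_field.  */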
static void
store_constructor_field (target, bitsize, bitpos, mode, exp, type, cleared,
                         alias_set)
     rtx target;
     unsigned HOST_WIDE_INT bitsize;
     HOST_WIDE_INT bitpos;
     enum machine_mode mode;
     tree exp, type;
     int cleared;
     int alias_set;
{
  if (TREE_CODE (exp) == CONSTRUCTOR
      && bitpos % BITS_PER_UNIT == 0
      /* If we have a non-zero bitpos for a register target, then we just
         let store_field do the bitfield handling.  This is unlikely to
         generate unnecessary clear instructions anyways.  */
      && (bitpos == 0 || GET_CODE (target) == MEM))
    {
      if (GET_CODE (target) == MEM)
        target
          = adjust_address (target,
                            GET_MODE (target) == BLKmode
                            || 0 != (bitpos
                                     % GET_MODE_ALIGNMENT (GET_MODE (target)))
                            ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);

      /* Update the alias set, if required.  */
      if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
          && MEM_ALIAS_SET (target) != 0)
        {
          target = copy_rtx (target);
          set_mem_alias_set (target, alias_set);
        }

      store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
    }
  else
    store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
                 alias_set);
}
/* Store the value of constructor EXP into the rtx TARGET.
   TARGET is either a REG or a MEM; we know it cannot conflict, since
   safe_from_p has been called.
   CLEARED is true if TARGET is known to have been zero'd.
   SIZE is the number of bytes of TARGET we are allowed to modify: this
   may not be the same as the size of EXP if we are assigning to a field
   which has been packed to exclude padding bits.  */
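/* For instance, when assigning a constructor to a field that was packed
   to exclude padding, SIZE can be smaller than the size of the
   constructor's type, so the clearing code below must touch only the
   first SIZE bytes of TARGET.  */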
4379 store_constructor (exp
, target
, cleared
, size
)
4385 tree type
= TREE_TYPE (exp
);
4386 #ifdef WORD_REGISTER_OPERATIONS
4387 HOST_WIDE_INT exp_size
= int_size_in_bytes (type
);
4390 if (TREE_CODE (type
) == RECORD_TYPE
|| TREE_CODE (type
) == UNION_TYPE
4391 || TREE_CODE (type
) == QUAL_UNION_TYPE
)
4395 /* We either clear the aggregate or indicate the value is dead. */
4396 if ((TREE_CODE (type
) == UNION_TYPE
4397 || TREE_CODE (type
) == QUAL_UNION_TYPE
)
4399 && ! CONSTRUCTOR_ELTS (exp
))
4400 /* If the constructor is empty, clear the union. */
4402 clear_storage (target
, expr_size (exp
));
      /* If we are building a static constructor into a register,
         set the initial value as zero so we can fold the value into
         a constant.  But if more than one register is involved,
         this probably loses.  */
4410 else if (! cleared
&& GET_CODE (target
) == REG
&& TREE_STATIC (exp
)
4411 && GET_MODE_SIZE (GET_MODE (target
)) <= UNITS_PER_WORD
)
4413 emit_move_insn (target
, CONST0_RTX (GET_MODE (target
)));
      /* If the constructor has fewer fields than the structure
         or if we are initializing the structure to mostly zeros,
         clear the whole structure first.  Don't do this if TARGET is a
         register whose mode size isn't equal to SIZE since clear_storage
         can't handle this case.  */
4422 else if (! cleared
&& size
> 0
4423 && ((list_length (CONSTRUCTOR_ELTS (exp
))
4424 != fields_length (type
))
4425 || mostly_zeros_p (exp
))
4426 && (GET_CODE (target
) != REG
4427 || ((HOST_WIDE_INT
) GET_MODE_SIZE (GET_MODE (target
))
4430 clear_storage (target
, GEN_INT (size
));
4435 emit_insn (gen_rtx_CLOBBER (VOIDmode
, target
));
4437 /* Store each element of the constructor into
4438 the corresponding field of TARGET. */
4440 for (elt
= CONSTRUCTOR_ELTS (exp
); elt
; elt
= TREE_CHAIN (elt
))
4442 tree field
= TREE_PURPOSE (elt
);
4443 tree value
= TREE_VALUE (elt
);
4444 enum machine_mode mode
;
4445 HOST_WIDE_INT bitsize
;
4446 HOST_WIDE_INT bitpos
= 0;
4449 rtx to_rtx
= target
;
          /* Just ignore missing fields.
             We cleared the whole structure, above,
             if any fields are missing.  */
4457 if (cleared
&& is_zeros_p (value
))
4460 if (host_integerp (DECL_SIZE (field
), 1))
4461 bitsize
= tree_low_cst (DECL_SIZE (field
), 1);
4465 unsignedp
= TREE_UNSIGNED (field
);
4466 mode
= DECL_MODE (field
);
4467 if (DECL_BIT_FIELD (field
))
4470 offset
= DECL_FIELD_OFFSET (field
);
4471 if (host_integerp (offset
, 0)
4472 && host_integerp (bit_position (field
), 0))
4474 bitpos
= int_bit_position (field
);
4478 bitpos
= tree_low_cst (DECL_FIELD_BIT_OFFSET (field
), 0);
4484 if (contains_placeholder_p (offset
))
4485 offset
= build (WITH_RECORD_EXPR
, sizetype
,
4486 offset
, make_tree (TREE_TYPE (exp
), target
));
4488 offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, 0);
4489 if (GET_CODE (to_rtx
) != MEM
)
4492 if (GET_MODE (offset_rtx
) != ptr_mode
)
4493 offset_rtx
= convert_to_mode (ptr_mode
, offset_rtx
, 0);
4495 #ifdef POINTERS_EXTEND_UNSIGNED
4496 if (GET_MODE (offset_rtx
) != Pmode
)
4497 offset_rtx
= convert_memory_address (Pmode
, offset_rtx
);
4500 to_rtx
= offset_address (to_rtx
, offset_rtx
,
4501 highest_pow2_factor (offset
));
4504 if (TREE_READONLY (field
))
4506 if (GET_CODE (to_rtx
) == MEM
)
4507 to_rtx
= copy_rtx (to_rtx
);
4509 RTX_UNCHANGING_P (to_rtx
) = 1;
#ifdef WORD_REGISTER_OPERATIONS
          /* If this initializes a field that is smaller than a word, at the
             start of a word, try to widen it to a full word.
             This special case allows us to output C++ member function
             initializations in a form that the optimizers can understand.  */
4517 if (GET_CODE (target
) == REG
4518 && bitsize
< BITS_PER_WORD
4519 && bitpos
% BITS_PER_WORD
== 0
4520 && GET_MODE_CLASS (mode
) == MODE_INT
4521 && TREE_CODE (value
) == INTEGER_CST
4523 && bitpos
+ BITS_PER_WORD
<= exp_size
* BITS_PER_UNIT
)
4525 tree type
= TREE_TYPE (value
);
4527 if (TYPE_PRECISION (type
) < BITS_PER_WORD
)
4529 type
= (*lang_hooks
.types
.type_for_size
)
4530 (BITS_PER_WORD
, TREE_UNSIGNED (type
));
4531 value
= convert (type
, value
);
4534 if (BYTES_BIG_ENDIAN
)
4536 = fold (build (LSHIFT_EXPR
, type
, value
,
4537 build_int_2 (BITS_PER_WORD
- bitsize
, 0)));
4538 bitsize
= BITS_PER_WORD
;
4543 if (GET_CODE (to_rtx
) == MEM
&& !MEM_KEEP_ALIAS_SET_P (to_rtx
)
4544 && DECL_NONADDRESSABLE_P (field
))
4546 to_rtx
= copy_rtx (to_rtx
);
4547 MEM_KEEP_ALIAS_SET_P (to_rtx
) = 1;
4550 store_constructor_field (to_rtx
, bitsize
, bitpos
, mode
,
4551 value
, type
, cleared
,
4552 get_alias_set (TREE_TYPE (field
)));
4555 else if (TREE_CODE (type
) == ARRAY_TYPE
4556 || TREE_CODE (type
) == VECTOR_TYPE
)
4561 tree domain
= TYPE_DOMAIN (type
);
4562 tree elttype
= TREE_TYPE (type
);
4564 HOST_WIDE_INT minelt
= 0;
4565 HOST_WIDE_INT maxelt
= 0;
4567 /* Vectors are like arrays, but the domain is stored via an array
4569 if (TREE_CODE (type
) == VECTOR_TYPE
)
          /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
             the same field as TYPE_DOMAIN, we are not guaranteed that
             it always will.  */
          domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
          domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
4578 const_bounds_p
= (TYPE_MIN_VALUE (domain
)
4579 && TYPE_MAX_VALUE (domain
)
4580 && host_integerp (TYPE_MIN_VALUE (domain
), 0)
4581 && host_integerp (TYPE_MAX_VALUE (domain
), 0));
4583 /* If we have constant bounds for the range of the type, get them. */
4586 minelt
= tree_low_cst (TYPE_MIN_VALUE (domain
), 0);
4587 maxelt
= tree_low_cst (TYPE_MAX_VALUE (domain
), 0);
      /* If the constructor has fewer elements than the array,
         clear the whole array first.  Similarly if this is
         static constructor of a non-BLKmode object.  */
4593 if (cleared
|| (GET_CODE (target
) == REG
&& TREE_STATIC (exp
)))
4597 HOST_WIDE_INT count
= 0, zero_count
= 0;
4598 need_to_clear
= ! const_bounds_p
;
          /* This loop is a more accurate version of the loop in
             mostly_zeros_p (it handles RANGE_EXPR in an index).
             It is also needed to check for missing elements.  */
4603 for (elt
= CONSTRUCTOR_ELTS (exp
);
4604 elt
!= NULL_TREE
&& ! need_to_clear
;
4605 elt
= TREE_CHAIN (elt
))
4607 tree index
= TREE_PURPOSE (elt
);
4608 HOST_WIDE_INT this_node_count
;
4610 if (index
!= NULL_TREE
&& TREE_CODE (index
) == RANGE_EXPR
)
4612 tree lo_index
= TREE_OPERAND (index
, 0);
4613 tree hi_index
= TREE_OPERAND (index
, 1);
4615 if (! host_integerp (lo_index
, 1)
4616 || ! host_integerp (hi_index
, 1))
4622 this_node_count
= (tree_low_cst (hi_index
, 1)
4623 - tree_low_cst (lo_index
, 1) + 1);
4626 this_node_count
= 1;
4628 count
+= this_node_count
;
4629 if (mostly_zeros_p (TREE_VALUE (elt
)))
4630 zero_count
+= this_node_count
;
          /* Clear the entire array first if there are any missing elements,
             or if the incidence of zero elements is >= 75%.  */
4636 && (count
< maxelt
- minelt
+ 1 || 4 * zero_count
>= 3 * count
))
4640 if (need_to_clear
&& size
> 0)
4645 emit_move_insn (target
, CONST0_RTX (GET_MODE (target
)));
4647 clear_storage (target
, GEN_INT (size
));
4651 else if (REG_P (target
))
4652 /* Inform later passes that the old value is dead. */
4653 emit_insn (gen_rtx_CLOBBER (VOIDmode
, target
));
      /* Store each element of the constructor into
         the corresponding element of TARGET, determined
         by counting the elements.  */
4658 for (elt
= CONSTRUCTOR_ELTS (exp
), i
= 0;
4660 elt
= TREE_CHAIN (elt
), i
++)
4662 enum machine_mode mode
;
4663 HOST_WIDE_INT bitsize
;
4664 HOST_WIDE_INT bitpos
;
4666 tree value
= TREE_VALUE (elt
);
4667 tree index
= TREE_PURPOSE (elt
);
4668 rtx xtarget
= target
;
4670 if (cleared
&& is_zeros_p (value
))
4673 unsignedp
= TREE_UNSIGNED (elttype
);
4674 mode
= TYPE_MODE (elttype
);
4675 if (mode
== BLKmode
)
4676 bitsize
= (host_integerp (TYPE_SIZE (elttype
), 1)
4677 ? tree_low_cst (TYPE_SIZE (elttype
), 1)
4680 bitsize
= GET_MODE_BITSIZE (mode
);
4682 if (index
!= NULL_TREE
&& TREE_CODE (index
) == RANGE_EXPR
)
4684 tree lo_index
= TREE_OPERAND (index
, 0);
4685 tree hi_index
= TREE_OPERAND (index
, 1);
4686 rtx index_r
, pos_rtx
, hi_r
, loop_top
, loop_end
;
4687 struct nesting
*loop
;
4688 HOST_WIDE_INT lo
, hi
, count
;
4691 /* If the range is constant and "small", unroll the loop. */
4693 && host_integerp (lo_index
, 0)
4694 && host_integerp (hi_index
, 0)
4695 && (lo
= tree_low_cst (lo_index
, 0),
4696 hi
= tree_low_cst (hi_index
, 0),
4697 count
= hi
- lo
+ 1,
4698 (GET_CODE (target
) != MEM
4700 || (host_integerp (TYPE_SIZE (elttype
), 1)
4701 && (tree_low_cst (TYPE_SIZE (elttype
), 1) * count
4704 lo
-= minelt
; hi
-= minelt
;
4705 for (; lo
<= hi
; lo
++)
4707 bitpos
= lo
* tree_low_cst (TYPE_SIZE (elttype
), 0);
4709 if (GET_CODE (target
) == MEM
4710 && !MEM_KEEP_ALIAS_SET_P (target
)
4711 && TREE_CODE (type
) == ARRAY_TYPE
4712 && TYPE_NONALIASED_COMPONENT (type
))
4714 target
= copy_rtx (target
);
4715 MEM_KEEP_ALIAS_SET_P (target
) = 1;
4718 store_constructor_field
4719 (target
, bitsize
, bitpos
, mode
, value
, type
, cleared
,
4720 get_alias_set (elttype
));
4725 hi_r
= expand_expr (hi_index
, NULL_RTX
, VOIDmode
, 0);
4726 loop_top
= gen_label_rtx ();
4727 loop_end
= gen_label_rtx ();
4729 unsignedp
= TREE_UNSIGNED (domain
);
4731 index
= build_decl (VAR_DECL
, NULL_TREE
, domain
);
4734 = gen_reg_rtx (promote_mode (domain
, DECL_MODE (index
),
4736 SET_DECL_RTL (index
, index_r
);
4737 if (TREE_CODE (value
) == SAVE_EXPR
4738 && SAVE_EXPR_RTL (value
) == 0)
4740 /* Make sure value gets expanded once before the
4742 expand_expr (value
, const0_rtx
, VOIDmode
, 0);
4745 store_expr (lo_index
, index_r
, 0);
4746 loop
= expand_start_loop (0);
4748 /* Assign value to element index. */
4750 = convert (ssizetype
,
4751 fold (build (MINUS_EXPR
, TREE_TYPE (index
),
4752 index
, TYPE_MIN_VALUE (domain
))));
4753 position
= size_binop (MULT_EXPR
, position
,
4755 TYPE_SIZE_UNIT (elttype
)));
4757 pos_rtx
= expand_expr (position
, 0, VOIDmode
, 0);
4758 xtarget
= offset_address (target
, pos_rtx
,
4759 highest_pow2_factor (position
));
4760 xtarget
= adjust_address (xtarget
, mode
, 0);
4761 if (TREE_CODE (value
) == CONSTRUCTOR
)
4762 store_constructor (value
, xtarget
, cleared
,
4763 bitsize
/ BITS_PER_UNIT
);
4765 store_expr (value
, xtarget
, 0);
4767 expand_exit_loop_if_false (loop
,
4768 build (LT_EXPR
, integer_type_node
,
4771 expand_increment (build (PREINCREMENT_EXPR
,
4773 index
, integer_one_node
), 0, 0);
4775 emit_label (loop_end
);
4778 else if ((index
!= 0 && ! host_integerp (index
, 0))
4779 || ! host_integerp (TYPE_SIZE (elttype
), 1))
4784 index
= ssize_int (1);
4787 index
= convert (ssizetype
,
4788 fold (build (MINUS_EXPR
, index
,
4789 TYPE_MIN_VALUE (domain
))));
4791 position
= size_binop (MULT_EXPR
, index
,
4793 TYPE_SIZE_UNIT (elttype
)));
4794 xtarget
= offset_address (target
,
4795 expand_expr (position
, 0, VOIDmode
, 0),
4796 highest_pow2_factor (position
));
4797 xtarget
= adjust_address (xtarget
, mode
, 0);
4798 store_expr (value
, xtarget
, 0);
4803 bitpos
= ((tree_low_cst (index
, 0) - minelt
)
4804 * tree_low_cst (TYPE_SIZE (elttype
), 1));
4806 bitpos
= (i
* tree_low_cst (TYPE_SIZE (elttype
), 1));
4808 if (GET_CODE (target
) == MEM
&& !MEM_KEEP_ALIAS_SET_P (target
)
4809 && TREE_CODE (type
) == ARRAY_TYPE
4810 && TYPE_NONALIASED_COMPONENT (type
))
4812 target
= copy_rtx (target
);
4813 MEM_KEEP_ALIAS_SET_P (target
) = 1;
4816 store_constructor_field (target
, bitsize
, bitpos
, mode
, value
,
4817 type
, cleared
, get_alias_set (elttype
));
4823 /* Set constructor assignments. */
4824 else if (TREE_CODE (type
) == SET_TYPE
)
4826 tree elt
= CONSTRUCTOR_ELTS (exp
);
4827 unsigned HOST_WIDE_INT nbytes
= int_size_in_bytes (type
), nbits
;
4828 tree domain
= TYPE_DOMAIN (type
);
4829 tree domain_min
, domain_max
, bitlength
;
      /* The default implementation strategy is to extract the constant
         parts of the constructor, use that to initialize the target,
         and then "or" in whatever non-constant ranges we need in addition.

         If a large set is all zero or all ones, it is
         probably better to set it using memset (if available) or bzero.
         Also, if a large set has just a single range, it may also be
         better to first clear the whole set (using bzero/memset), and
         then set the bits we want.  */
4841 /* Check for all zeros. */
4842 if (elt
== NULL_TREE
&& size
> 0)
4845 clear_storage (target
, GEN_INT (size
));
4849 domain_min
= convert (sizetype
, TYPE_MIN_VALUE (domain
));
4850 domain_max
= convert (sizetype
, TYPE_MAX_VALUE (domain
));
4851 bitlength
= size_binop (PLUS_EXPR
,
4852 size_diffop (domain_max
, domain_min
),
4855 nbits
= tree_low_cst (bitlength
, 1);
      /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
         are "complicated" (more than one range), initialize (the
         constant parts) by copying from a constant.  */
4860 if (GET_MODE (target
) != BLKmode
|| nbits
<= 2 * BITS_PER_WORD
4861 || (nbytes
<= 32 && TREE_CHAIN (elt
) != NULL_TREE
))
4863 unsigned int set_word_size
= TYPE_ALIGN (TREE_TYPE (exp
));
4864 enum machine_mode mode
= mode_for_size (set_word_size
, MODE_INT
, 1);
4865 char *bit_buffer
= (char *) alloca (nbits
);
4866 HOST_WIDE_INT word
= 0;
4867 unsigned int bit_pos
= 0;
4868 unsigned int ibit
= 0;
4869 unsigned int offset
= 0; /* In bytes from beginning of set. */
4871 elt
= get_set_constructor_bits (exp
, bit_buffer
, nbits
);
4874 if (bit_buffer
[ibit
])
4876 if (BYTES_BIG_ENDIAN
)
4877 word
|= (1 << (set_word_size
- 1 - bit_pos
));
4879 word
|= 1 << bit_pos
;
4883 if (bit_pos
>= set_word_size
|| ibit
== nbits
)
4885 if (word
!= 0 || ! cleared
)
4887 rtx datum
= GEN_INT (word
);
4890 /* The assumption here is that it is safe to use
4891 XEXP if the set is multi-word, but not if
4892 it's single-word. */
4893 if (GET_CODE (target
) == MEM
)
4894 to_rtx
= adjust_address (target
, mode
, offset
);
4895 else if (offset
== 0)
4899 emit_move_insn (to_rtx
, datum
);
4906 offset
+= set_word_size
/ BITS_PER_UNIT
;
4911 /* Don't bother clearing storage if the set is all ones. */
4912 if (TREE_CHAIN (elt
) != NULL_TREE
4913 || (TREE_PURPOSE (elt
) == NULL_TREE
4915 : ( ! host_integerp (TREE_VALUE (elt
), 0)
4916 || ! host_integerp (TREE_PURPOSE (elt
), 0)
4917 || (tree_low_cst (TREE_VALUE (elt
), 0)
4918 - tree_low_cst (TREE_PURPOSE (elt
), 0) + 1
4919 != (HOST_WIDE_INT
) nbits
))))
4920 clear_storage (target
, expr_size (exp
));
4922 for (; elt
!= NULL_TREE
; elt
= TREE_CHAIN (elt
))
4924 /* Start of range of element or NULL. */
4925 tree startbit
= TREE_PURPOSE (elt
);
4926 /* End of range of element, or element value. */
4927 tree endbit
= TREE_VALUE (elt
);
4928 #ifdef TARGET_MEM_FUNCTIONS
4929 HOST_WIDE_INT startb
, endb
;
4931 rtx bitlength_rtx
, startbit_rtx
, endbit_rtx
, targetx
;
4933 bitlength_rtx
= expand_expr (bitlength
,
4934 NULL_RTX
, MEM
, EXPAND_CONST_ADDRESS
);
4936 /* Handle non-range tuple element like [ expr ]. */
4937 if (startbit
== NULL_TREE
)
4939 startbit
= save_expr (endbit
);
4943 startbit
= convert (sizetype
, startbit
);
4944 endbit
= convert (sizetype
, endbit
);
4945 if (! integer_zerop (domain_min
))
4947 startbit
= size_binop (MINUS_EXPR
, startbit
, domain_min
);
4948 endbit
= size_binop (MINUS_EXPR
, endbit
, domain_min
);
4950 startbit_rtx
= expand_expr (startbit
, NULL_RTX
, MEM
,
4951 EXPAND_CONST_ADDRESS
);
4952 endbit_rtx
= expand_expr (endbit
, NULL_RTX
, MEM
,
4953 EXPAND_CONST_ADDRESS
);
4959 ((build_qualified_type ((*lang_hooks
.types
.type_for_mode
)
4960 (GET_MODE (target
), 0),
4963 emit_move_insn (targetx
, target
);
4966 else if (GET_CODE (target
) == MEM
)
4971 #ifdef TARGET_MEM_FUNCTIONS
4972 /* Optimization: If startbit and endbit are
4973 constants divisible by BITS_PER_UNIT,
4974 call memset instead. */
4975 if (TREE_CODE (startbit
) == INTEGER_CST
4976 && TREE_CODE (endbit
) == INTEGER_CST
4977 && (startb
= TREE_INT_CST_LOW (startbit
)) % BITS_PER_UNIT
== 0
4978 && (endb
= TREE_INT_CST_LOW (endbit
) + 1) % BITS_PER_UNIT
== 0)
4980 emit_library_call (memset_libfunc
, LCT_NORMAL
,
4982 plus_constant (XEXP (targetx
, 0),
4983 startb
/ BITS_PER_UNIT
),
4985 constm1_rtx
, TYPE_MODE (integer_type_node
),
4986 GEN_INT ((endb
- startb
) / BITS_PER_UNIT
),
4987 TYPE_MODE (sizetype
));
4991 emit_library_call (gen_rtx_SYMBOL_REF (Pmode
, "__setbits"),
4992 LCT_NORMAL
, VOIDmode
, 4, XEXP (targetx
, 0),
4993 Pmode
, bitlength_rtx
, TYPE_MODE (sizetype
),
4994 startbit_rtx
, TYPE_MODE (sizetype
),
4995 endbit_rtx
, TYPE_MODE (sizetype
));
4998 emit_move_insn (target
, targetx
);
/* Store the value of EXP (an expression tree)
   into a subfield of TARGET which has mode MODE and occupies
   BITSIZE bits, starting BITPOS bits from the start of TARGET.
   If MODE is VOIDmode, it means that we are storing into a bit-field.

   If VALUE_MODE is VOIDmode, return nothing in particular.
   UNSIGNEDP is not used in this case.

   Otherwise, return an rtx for the value stored.  This rtx
   has mode VALUE_MODE if that is convenient to do.
   In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.

   TYPE is the type of the underlying object,

   ALIAS_SET is the alias set for the destination.  This value will
   (in general) be different from that for TARGET, since TARGET is a
   reference to the containing structure.  */
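/* For example, when storing to s.f through a COMPONENT_REF, TARGET is
   the rtx for the whole of `s' while ALIAS_SET describes just the field
   `f', which is why the alias set is passed in separately instead of
   being taken from TARGET.  */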
static rtx
store_field (target, bitsize, bitpos, mode, exp, value_mode, unsignedp, type,
             alias_set)
     rtx target;
     HOST_WIDE_INT bitsize;
     HOST_WIDE_INT bitpos;
     enum machine_mode mode;
     tree exp;
     enum machine_mode value_mode;
     int unsignedp;
     tree type;
     int alias_set;
{
  HOST_WIDE_INT width_mask = 0;
  if (TREE_CODE (exp) == ERROR_MARK)
    return const0_rtx;

  /* If we have nothing to store, do nothing unless the expression has
     side-effects.  */
  if (bitsize == 0)
    return expand_expr (exp, const0_rtx, VOIDmode, 0);
  else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
    width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
  /* If we are storing into an unaligned field of an aligned union that is
     in a register, we may have the mode of TARGET being an integer mode but
     MODE == BLKmode.  In that case, get an aligned object whose size and
     alignment are the same as TARGET and store TARGET into it (we can avoid
     the store if the field being stored is the entire width of TARGET).  Then
     call ourselves recursively to store the field into a BLKmode version of
     that object.  Finally, load from the object into TARGET.  This is not
     very efficient in general, but should only be slightly more expensive
     than the otherwise-required unaligned accesses.  Perhaps this can be
     cleaned up later.  */
5061 && (GET_CODE (target
) == REG
|| GET_CODE (target
) == SUBREG
))
5065 (build_qualified_type (type
, TYPE_QUALS (type
) | TYPE_QUAL_CONST
),
5067 rtx blk_object
= adjust_address (object
, BLKmode
, 0);
5069 if (bitsize
!= (HOST_WIDE_INT
) GET_MODE_BITSIZE (GET_MODE (target
)))
5070 emit_move_insn (object
, target
);
5072 store_field (blk_object
, bitsize
, bitpos
, mode
, exp
, VOIDmode
, 0, type
,
5075 emit_move_insn (target
, object
);
5077 /* We want to return the BLKmode version of the data. */
5081 if (GET_CODE (target
) == CONCAT
)
5083 /* We're storing into a struct containing a single __complex. */
5087 return store_expr (exp
, target
, 0);
5090 /* If the structure is in a register or if the component
5091 is a bit field, we cannot use addressing to access it.
5092 Use bit-field techniques or SUBREG to store in it. */
5094 if (mode
== VOIDmode
5095 || (mode
!= BLKmode
&& ! direct_store
[(int) mode
]
5096 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_INT
5097 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_FLOAT
)
5098 || GET_CODE (target
) == REG
5099 || GET_CODE (target
) == SUBREG
5100 /* If the field isn't aligned enough to store as an ordinary memref,
5101 store it as a bit field. */
5102 || (mode
!= BLKmode
&& SLOW_UNALIGNED_ACCESS (mode
, MEM_ALIGN (target
))
5103 && (MEM_ALIGN (target
) < GET_MODE_ALIGNMENT (mode
)
5104 || bitpos
% GET_MODE_ALIGNMENT (mode
)))
5105 /* If the RHS and field are a constant size and the size of the
5106 RHS isn't the same size as the bitfield, we must use bitfield
5109 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
))) == INTEGER_CST
5110 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp
)), bitsize
) != 0))
5112 rtx temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, 0);
      /* If BITSIZE is narrower than the size of the type of EXP
         we will be narrowing TEMP.  Normally, what's wanted are the
         low-order bits.  However, if EXP's type is a record and this is a
         big-endian machine, we want the upper BITSIZE bits.  */
5118 if (BYTES_BIG_ENDIAN
&& GET_MODE_CLASS (GET_MODE (temp
)) == MODE_INT
5119 && bitsize
< (HOST_WIDE_INT
) GET_MODE_BITSIZE (GET_MODE (temp
))
5120 && TREE_CODE (TREE_TYPE (exp
)) == RECORD_TYPE
)
5121 temp
= expand_shift (RSHIFT_EXPR
, GET_MODE (temp
), temp
,
5122 size_int (GET_MODE_BITSIZE (GET_MODE (temp
))
5126 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5128 if (mode
!= VOIDmode
&& mode
!= BLKmode
5129 && mode
!= TYPE_MODE (TREE_TYPE (exp
)))
5130 temp
= convert_modes (mode
, TYPE_MODE (TREE_TYPE (exp
)), temp
, 1);
5132 /* If the modes of TARGET and TEMP are both BLKmode, both
5133 must be in memory and BITPOS must be aligned on a byte
5134 boundary. If so, we simply do a block copy. */
5135 if (GET_MODE (target
) == BLKmode
&& GET_MODE (temp
) == BLKmode
)
5137 if (GET_CODE (target
) != MEM
|| GET_CODE (temp
) != MEM
5138 || bitpos
% BITS_PER_UNIT
!= 0)
5141 target
= adjust_address (target
, VOIDmode
, bitpos
/ BITS_PER_UNIT
);
5142 emit_block_move (target
, temp
,
5143 GEN_INT ((bitsize
+ BITS_PER_UNIT
- 1)
5146 return value_mode
== VOIDmode
? const0_rtx
: target
;
5149 /* Store the value in the bitfield. */
5150 store_bit_field (target
, bitsize
, bitpos
, mode
, temp
,
5151 int_size_in_bytes (type
));
5153 if (value_mode
!= VOIDmode
)
5155 /* The caller wants an rtx for the value.
5156 If possible, avoid refetching from the bitfield itself. */
5158 && ! (GET_CODE (target
) == MEM
&& MEM_VOLATILE_P (target
)))
5161 enum machine_mode tmode
;
5163 tmode
= GET_MODE (temp
);
5164 if (tmode
== VOIDmode
)
5168 return expand_and (tmode
, temp
,
5169 gen_int_mode (width_mask
, tmode
),
5172 count
= build_int_2 (GET_MODE_BITSIZE (tmode
) - bitsize
, 0);
5173 temp
= expand_shift (LSHIFT_EXPR
, tmode
, temp
, count
, 0, 0);
5174 return expand_shift (RSHIFT_EXPR
, tmode
, temp
, count
, 0, 0);
5177 return extract_bit_field (target
, bitsize
, bitpos
, unsignedp
,
5178 NULL_RTX
, value_mode
, VOIDmode
,
5179 int_size_in_bytes (type
));
5185 rtx addr
= XEXP (target
, 0);
5186 rtx to_rtx
= target
;
5188 /* If a value is wanted, it must be the lhs;
5189 so make the address stable for multiple use. */
5191 if (value_mode
!= VOIDmode
&& GET_CODE (addr
) != REG
5192 && ! CONSTANT_ADDRESS_P (addr
)
5193 /* A frame-pointer reference is already stable. */
5194 && ! (GET_CODE (addr
) == PLUS
5195 && GET_CODE (XEXP (addr
, 1)) == CONST_INT
5196 && (XEXP (addr
, 0) == virtual_incoming_args_rtx
5197 || XEXP (addr
, 0) == virtual_stack_vars_rtx
)))
5198 to_rtx
= replace_equiv_address (to_rtx
, copy_to_reg (addr
));
5200 /* Now build a reference to just the desired component. */
5202 to_rtx
= adjust_address (target
, mode
, bitpos
/ BITS_PER_UNIT
);
5204 if (to_rtx
== target
)
5205 to_rtx
= copy_rtx (to_rtx
);
5207 MEM_SET_IN_STRUCT_P (to_rtx
, 1);
5208 if (!MEM_KEEP_ALIAS_SET_P (to_rtx
) && MEM_ALIAS_SET (to_rtx
) != 0)
5209 set_mem_alias_set (to_rtx
, alias_set
);
5211 return store_expr (exp
, to_rtx
, value_mode
!= VOIDmode
);
/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
   an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
   codes and find the ultimate containing object, which we return.

   We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
   bit position, and *PUNSIGNEDP to the signedness of the field.
   If the position of the field is variable, we store a tree
   giving the variable offset (in units) in *POFFSET.
   This offset is in addition to the bit position.
   If the position is not variable, we store 0 in *POFFSET.

   If any of the extraction expressions is volatile,
   we store 1 in *PVOLATILEP.  Otherwise we don't change that.

   If the field is a bit-field, *PMODE is set to VOIDmode.  Otherwise, it
   is a mode that can be used to access the field.  In that case, *PBITSIZE
   is redundant.

   If the field describes a variable-sized object, *PMODE is set to
   VOIDmode and *PBITSIZE is set to -1.  An access cannot be made in
   this case, but the address of the object can be found.  */
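/* For example, for a reference such as s.a[i].b this walks the nested
   COMPONENT_REFs and the ARRAY_REF, returns the innermost object `s',
   accumulates the constant part of the displacement into *PBITPOS, and
   leaves the `i'-dependent part (in units) in *POFFSET.  */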
tree
get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
                     punsignedp, pvolatilep)
     tree exp;
     HOST_WIDE_INT *pbitsize;
     HOST_WIDE_INT *pbitpos;
     tree *poffset;
     enum machine_mode *pmode;
     int *punsignedp;
     int *pvolatilep;
{
  tree size_tree = 0;
  enum machine_mode mode = VOIDmode;
  tree offset = size_zero_node;
  tree bit_offset = bitsize_zero_node;
  tree placeholder_ptr = 0;
  tree tem;
5255 /* First get the mode, signedness, and size. We do this from just the
5256 outermost expression. */
5257 if (TREE_CODE (exp
) == COMPONENT_REF
)
5259 size_tree
= DECL_SIZE (TREE_OPERAND (exp
, 1));
5260 if (! DECL_BIT_FIELD (TREE_OPERAND (exp
, 1)))
5261 mode
= DECL_MODE (TREE_OPERAND (exp
, 1));
5263 *punsignedp
= TREE_UNSIGNED (TREE_OPERAND (exp
, 1));
5265 else if (TREE_CODE (exp
) == BIT_FIELD_REF
)
5267 size_tree
= TREE_OPERAND (exp
, 1);
5268 *punsignedp
= TREE_UNSIGNED (exp
);
5272 mode
= TYPE_MODE (TREE_TYPE (exp
));
5273 *punsignedp
= TREE_UNSIGNED (TREE_TYPE (exp
));
5275 if (mode
== BLKmode
)
5276 size_tree
= TYPE_SIZE (TREE_TYPE (exp
));
5278 *pbitsize
= GET_MODE_BITSIZE (mode
);
5283 if (! host_integerp (size_tree
, 1))
5284 mode
= BLKmode
, *pbitsize
= -1;
5286 *pbitsize
= tree_low_cst (size_tree
, 1);
5289 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5290 and find the ultimate containing object. */
5293 if (TREE_CODE (exp
) == BIT_FIELD_REF
)
5294 bit_offset
= size_binop (PLUS_EXPR
, bit_offset
, TREE_OPERAND (exp
, 2));
5295 else if (TREE_CODE (exp
) == COMPONENT_REF
)
5297 tree field
= TREE_OPERAND (exp
, 1);
5298 tree this_offset
= DECL_FIELD_OFFSET (field
);
5300 /* If this field hasn't been filled in yet, don't go
5301 past it. This should only happen when folding expressions
5302 made during type construction. */
5303 if (this_offset
== 0)
5305 else if (! TREE_CONSTANT (this_offset
)
5306 && contains_placeholder_p (this_offset
))
5307 this_offset
= build (WITH_RECORD_EXPR
, sizetype
, this_offset
, exp
);
5309 offset
= size_binop (PLUS_EXPR
, offset
, this_offset
);
5310 bit_offset
= size_binop (PLUS_EXPR
, bit_offset
,
5311 DECL_FIELD_BIT_OFFSET (field
));
5313 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5316 else if (TREE_CODE (exp
) == ARRAY_REF
5317 || TREE_CODE (exp
) == ARRAY_RANGE_REF
)
5319 tree index
= TREE_OPERAND (exp
, 1);
5320 tree array
= TREE_OPERAND (exp
, 0);
5321 tree domain
= TYPE_DOMAIN (TREE_TYPE (array
));
5322 tree low_bound
= (domain
? TYPE_MIN_VALUE (domain
) : 0);
5323 tree unit_size
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array
)));
          /* We assume all arrays have sizes that are a multiple of a byte.
             First subtract the lower bound, if any, in the type of the
             index, then convert to sizetype and multiply by the size of the
             array element.  */
5329 if (low_bound
!= 0 && ! integer_zerop (low_bound
))
5330 index
= fold (build (MINUS_EXPR
, TREE_TYPE (index
),
5333 /* If the index has a self-referential type, pass it to a
5334 WITH_RECORD_EXPR; if the component size is, pass our
5335 component to one. */
5336 if (! TREE_CONSTANT (index
)
5337 && contains_placeholder_p (index
))
5338 index
= build (WITH_RECORD_EXPR
, TREE_TYPE (index
), index
, exp
);
5339 if (! TREE_CONSTANT (unit_size
)
5340 && contains_placeholder_p (unit_size
))
5341 unit_size
= build (WITH_RECORD_EXPR
, sizetype
, unit_size
, array
);
5343 offset
= size_binop (PLUS_EXPR
, offset
,
5344 size_binop (MULT_EXPR
,
5345 convert (sizetype
, index
),
5349 else if (TREE_CODE (exp
) == PLACEHOLDER_EXPR
)
5351 tree
new = find_placeholder (exp
, &placeholder_ptr
);
5353 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5354 We might have been called from tree optimization where we
5355 haven't set up an object yet. */
5363 else if (TREE_CODE (exp
) != NON_LVALUE_EXPR
5364 && TREE_CODE (exp
) != VIEW_CONVERT_EXPR
5365 && ! ((TREE_CODE (exp
) == NOP_EXPR
5366 || TREE_CODE (exp
) == CONVERT_EXPR
)
5367 && (TYPE_MODE (TREE_TYPE (exp
))
5368 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))))
5371 /* If any reference in the chain is volatile, the effect is volatile. */
5372 if (TREE_THIS_VOLATILE (exp
))
5375 exp
= TREE_OPERAND (exp
, 0);
5378 /* If OFFSET is constant, see if we can return the whole thing as a
5379 constant bit position. Otherwise, split it up. */
5380 if (host_integerp (offset
, 0)
5381 && 0 != (tem
= size_binop (MULT_EXPR
, convert (bitsizetype
, offset
),
5383 && 0 != (tem
= size_binop (PLUS_EXPR
, tem
, bit_offset
))
5384 && host_integerp (tem
, 0))
5385 *pbitpos
= tree_low_cst (tem
, 0), *poffset
= 0;
5387 *pbitpos
= tree_low_cst (bit_offset
, 0), *poffset
= offset
;
/* Return 1 if T is an expression that get_inner_reference handles.  */

int
handled_component_p (t)
     tree t;
{
  switch (TREE_CODE (t))
    {
    case BIT_FIELD_REF:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case NON_LVALUE_EXPR:
    case VIEW_CONVERT_EXPR:
      return 1;

    case NOP_EXPR:
    case CONVERT_EXPR:
      return (TYPE_MODE (TREE_TYPE (t))
              == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));

    default:
      return 0;
    }
}
/* Given an rtx VALUE that may contain additions and multiplications, return
   an equivalent value that just refers to a register, memory, or constant.
   This is done by generating instructions to perform the arithmetic and
   returning a pseudo-register containing the value.

   The returned value may be a REG, SUBREG, MEM or constant.  */
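/* For example, given (plus:SI (reg:SI 60) (const_int 4)) this emits an
   add into a new pseudo and returns that pseudo, so the caller sees a
   plain register rather than an arithmetic expression.  */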
rtx
force_operand (value, target)
     rtx value, target;
{
  optab binoptab = 0;
  /* Use a temporary to force order of execution of calls to
     force_operand.  */
  rtx tmp;
  rtx op2;
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  rtx subtarget = get_subtarget (target);
  /* Check for a PIC address load.  */
  if ((GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
      && XEXP (value, 0) == pic_offset_table_rtx
      && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
          || GET_CODE (XEXP (value, 1)) == LABEL_REF
          || GET_CODE (XEXP (value, 1)) == CONST))
    {
      if (!subtarget)
        subtarget = gen_reg_rtx (GET_MODE (value));
      emit_move_insn (subtarget, value);
      return subtarget;
    }
) == PLUS
)
5452 binoptab
= add_optab
;
5453 else if (GET_CODE (value
) == MINUS
)
5454 binoptab
= sub_optab
;
5455 else if (GET_CODE (value
) == MULT
)
5457 op2
= XEXP (value
, 1);
5458 if (!CONSTANT_P (op2
)
5459 && !(GET_CODE (op2
) == REG
&& op2
!= subtarget
))
5461 tmp
= force_operand (XEXP (value
, 0), subtarget
);
5462 return expand_mult (GET_MODE (value
), tmp
,
5463 force_operand (op2
, NULL_RTX
),
  if (binoptab)
    {
      op2 = XEXP (value, 1);
      if (!CONSTANT_P (op2)
          && !(GET_CODE (op2) == REG && op2 != subtarget))
        subtarget = 0;
      if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
        {
          binoptab = add_optab;
          op2 = negate_rtx (GET_MODE (value), op2);
        }
      /* Check for an addition with OP2 a constant integer and our first
         operand a PLUS of a virtual register and something else.  In that
         case, we want to emit the sum of the virtual register and the
         constant first and then add the other value.  This allows virtual
         register instantiation to simply modify the constant rather than
         creating another one around this addition.  */
      if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
          && GET_CODE (XEXP (value, 0)) == PLUS
          && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
          && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
          && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
        {
          rtx temp = expand_binop (GET_MODE (value), binoptab,
                                   XEXP (XEXP (value, 0), 0), op2,
                                   subtarget, 0, OPTAB_LIB_WIDEN);
          return expand_binop (GET_MODE (value), binoptab, temp,
                               force_operand (XEXP (XEXP (value, 0), 1), 0),
                               target, 0, OPTAB_LIB_WIDEN);
        }

      tmp = force_operand (XEXP (value, 0), subtarget);
      return expand_binop (GET_MODE (value), binoptab, tmp,
                           force_operand (op2, NULL_RTX),
                           target, 0, OPTAB_LIB_WIDEN);
      /* We give UNSIGNEDP = 0 to expand_binop
         because the only operations we are expanding here are signed ones.  */
    }
#ifdef INSN_SCHEDULING
  /* On machines that have insn scheduling, we want all memory references to be
     explicit, so we need to deal with such paradoxical SUBREGs.  */
  if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
      && (GET_MODE_SIZE (GET_MODE (value))
          > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
    value
      = simplify_gen_subreg (GET_MODE (value),
                             force_reg (GET_MODE (SUBREG_REG (value)),
                                        force_operand (SUBREG_REG (value),
                                                       NULL_RTX)),
                             GET_MODE (SUBREG_REG (value)),
                             SUBREG_BYTE (value));
#endif

  return value;
}
/* Subroutine of expand_expr: return nonzero iff there is no way that
   EXP can reference X, which is being modified.  TOP_P is nonzero if this
   call is going to be used to determine whether we need a temporary
   for EXP, as opposed to a recursive call to this function.

   It is always safe for this routine to return zero since it merely
   searches for optimization opportunities.  */
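/* For example, if X is the MEM for a variable `a' and EXP contains a
   reference that may alias `a', the answer is 0 and the caller will
   expand EXP into a temporary before modifying X; returning 0 is always
   conservative and merely costs an extra temporary.  */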
5534 safe_from_p (x
, exp
, top_p
)
5541 static tree save_expr_list
;
      /* If EXP has varying size, we MUST use a target since we currently
         have no way of allocating temporaries of variable size
         (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
         So we assume here that something at a higher level has prevented a
         clash.  This is somewhat bogus, but the best we can do.  Only
         do this when X is BLKmode and when we are at the top level.  */
5550 || (top_p
&& TREE_TYPE (exp
) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp
))
5551 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
))) != INTEGER_CST
5552 && (TREE_CODE (TREE_TYPE (exp
)) != ARRAY_TYPE
5553 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp
)) == NULL_TREE
5554 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp
)))
5556 && GET_MODE (x
) == BLKmode
)
5557 /* If X is in the outgoing argument area, it is always safe. */
5558 || (GET_CODE (x
) == MEM
5559 && (XEXP (x
, 0) == virtual_outgoing_args_rtx
5560 || (GET_CODE (XEXP (x
, 0)) == PLUS
5561 && XEXP (XEXP (x
, 0), 0) == virtual_outgoing_args_rtx
))))
5564 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5565 find the underlying pseudo. */
5566 if (GET_CODE (x
) == SUBREG
)
5569 if (GET_CODE (x
) == REG
&& REGNO (x
) < FIRST_PSEUDO_REGISTER
)
  /* A SAVE_EXPR might appear many times in the expression passed to the
     top-level safe_from_p call, and if it has a complex subexpression,
     examining it multiple times could result in a combinatorial explosion.
     E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
     with optimization took about 28 minutes to compile -- even though it was
     only a few lines long.  So we mark each SAVE_EXPR we see with TREE_PRIVATE
     and turn that off when we are done.  We keep a list of the SAVE_EXPRs
     we have processed.  Note that the only test of top_p was above.  */
5589 rtn
= safe_from_p (x
, exp
, 0);
5591 for (t
= save_expr_list
; t
!= 0; t
= TREE_CHAIN (t
))
5592 TREE_PRIVATE (TREE_PURPOSE (t
)) = 0;
5597 /* Now look at our tree code and possibly recurse. */
5598 switch (TREE_CODE_CLASS (TREE_CODE (exp
)))
5601 exp_rtl
= DECL_RTL_SET_P (exp
) ? DECL_RTL (exp
) : NULL_RTX
;
5608 if (TREE_CODE (exp
) == TREE_LIST
)
5609 return ((TREE_VALUE (exp
) == 0
5610 || safe_from_p (x
, TREE_VALUE (exp
), 0))
5611 && (TREE_CHAIN (exp
) == 0
5612 || safe_from_p (x
, TREE_CHAIN (exp
), 0)));
5613 else if (TREE_CODE (exp
) == ERROR_MARK
)
5614 return 1; /* An already-visited SAVE_EXPR? */
5619 return safe_from_p (x
, TREE_OPERAND (exp
, 0), 0);
5623 return (safe_from_p (x
, TREE_OPERAND (exp
, 0), 0)
5624 && safe_from_p (x
, TREE_OPERAND (exp
, 1), 0));
  /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
     the expression.  If it is set, we conflict iff we are that rtx or
     both are in memory.  Otherwise, we check all operands of the
     expression recursively.  */
5633 switch (TREE_CODE (exp
))
5636 /* If the operand is static or we are static, we can't conflict.
5637 Likewise if we don't conflict with the operand at all. */
5638 if (staticp (TREE_OPERAND (exp
, 0))
5639 || TREE_STATIC (exp
)
5640 || safe_from_p (x
, TREE_OPERAND (exp
, 0), 0))
5643 /* Otherwise, the only way this can conflict is if we are taking
5644 the address of a DECL a that address if part of X, which is
5646 exp
= TREE_OPERAND (exp
, 0);
5649 if (!DECL_RTL_SET_P (exp
)
5650 || GET_CODE (DECL_RTL (exp
)) != MEM
)
5653 exp_rtl
= XEXP (DECL_RTL (exp
), 0);
5658 if (GET_CODE (x
) == MEM
5659 && alias_sets_conflict_p (MEM_ALIAS_SET (x
),
5660 get_alias_set (exp
)))
5665 /* Assume that the call will clobber all hard registers and
5667 if ((GET_CODE (x
) == REG
&& REGNO (x
) < FIRST_PSEUDO_REGISTER
)
5668 || GET_CODE (x
) == MEM
)
5673 /* If a sequence exists, we would have to scan every instruction
5674 in the sequence to see if it was safe. This is probably not
5676 if (RTL_EXPR_SEQUENCE (exp
))
5679 exp_rtl
= RTL_EXPR_RTL (exp
);
5682 case WITH_CLEANUP_EXPR
:
5683 exp_rtl
= WITH_CLEANUP_EXPR_RTL (exp
);
5686 case CLEANUP_POINT_EXPR
:
5687 return safe_from_p (x
, TREE_OPERAND (exp
, 0), 0);
5690 exp_rtl
= SAVE_EXPR_RTL (exp
);
5694 /* If we've already scanned this, don't do it again. Otherwise,
5695 show we've scanned it and record for clearing the flag if we're
5697 if (TREE_PRIVATE (exp
))
5700 TREE_PRIVATE (exp
) = 1;
5701 if (! safe_from_p (x
, TREE_OPERAND (exp
, 0), 0))
5703 TREE_PRIVATE (exp
) = 0;
5707 save_expr_list
= tree_cons (exp
, NULL_TREE
, save_expr_list
);
5711 /* The only operand we look at is operand 1. The rest aren't
5712 part of the expression. */
5713 return safe_from_p (x
, TREE_OPERAND (exp
, 1), 0);
5715 case METHOD_CALL_EXPR
:
5716 /* This takes an rtx argument, but shouldn't appear here. */
5723 /* If we have an rtx, we do not need to scan our operands. */
5727 nops
= first_rtl_op (TREE_CODE (exp
));
5728 for (i
= 0; i
< nops
; i
++)
5729 if (TREE_OPERAND (exp
, i
) != 0
5730 && ! safe_from_p (x
, TREE_OPERAND (exp
, i
), 0))
5733 /* If this is a language-specific tree code, it may require
5734 special handling. */
5735 if ((unsigned int) TREE_CODE (exp
)
5736 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5737 && !(*lang_hooks
.safe_from_p
) (x
, exp
))
5741 /* If we have an rtl, find any enclosed object. Then see if we conflict
5745 if (GET_CODE (exp_rtl
) == SUBREG
)
5747 exp_rtl
= SUBREG_REG (exp_rtl
);
5748 if (GET_CODE (exp_rtl
) == REG
5749 && REGNO (exp_rtl
) < FIRST_PSEUDO_REGISTER
)
      /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
         are memory and they conflict.  */
      return ! (rtx_equal_p (x, exp_rtl)
                || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
                    && true_dependence (exp_rtl, VOIDmode, x,
                                        rtx_addr_varies_p)));
    }

  /* If we reach here, it is safe.  */
  return 1;
}
/* Subroutine of expand_expr: return rtx if EXP is a
   variable or parameter; else return 0.  */

static rtx
var_rtx (exp)
     tree exp;
{
  STRIP_NOPS (exp);
  switch (TREE_CODE (exp))
    {
    case PARM_DECL:
    case VAR_DECL:
      return DECL_RTL (exp);

    default:
      return 0;
    }
}
#ifdef MAX_INTEGER_COMPUTATION_MODE

void
check_max_integer_computation_mode (exp)
     tree exp;
{
  enum tree_code code;
  enum machine_mode mode;

  /* Strip any NOPs that don't change the mode.  */
  STRIP_NOPS (exp);
  code = TREE_CODE (exp);

  /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE.  */
  if (code == NOP_EXPR
      && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
    return;

  /* First check the type of the overall operation.  We need only look at
     unary, binary and relational operations.  */
  if (TREE_CODE_CLASS (code) == '1'
      || TREE_CODE_CLASS (code) == '2'
      || TREE_CODE_CLASS (code) == '<')
    {
      mode = TYPE_MODE (TREE_TYPE (exp));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && mode > MAX_INTEGER_COMPUTATION_MODE)
        internal_error ("unsupported wide integer operation");
    }

  /* Check operand of a unary op.  */
  if (TREE_CODE_CLASS (code) == '1')
    {
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && mode > MAX_INTEGER_COMPUTATION_MODE)
        internal_error ("unsupported wide integer operation");
    }

  /* Check operands of a binary/comparison op.  */
  if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
    {
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && mode > MAX_INTEGER_COMPUTATION_MODE)
        internal_error ("unsupported wide integer operation");

      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && mode > MAX_INTEGER_COMPUTATION_MODE)
        internal_error ("unsupported wide integer operation");
    }
}
#endif
/* Return the highest power of two that EXP is known to be a multiple of.
   This is used in updating alignment of MEMs in array references.  */
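/* For example, an INTEGER_CST of 24 yields 8 (its lowest set bit), a
   MULT_EXPR multiplies the factors of its operands, and anything
   unrecognized yields 1, the weakest correct answer.  */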
static HOST_WIDE_INT
highest_pow2_factor (exp)
     tree exp;
{
  HOST_WIDE_INT c0, c1;

  switch (TREE_CODE (exp))
    {
    case INTEGER_CST:
      /* We can find the lowest bit that's a one.  If the low
         HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
         We need to handle this case since we can find it in a COND_EXPR,
         a MIN_EXPR, or a MAX_EXPR.  If the constant overflows, we have an
         erroneous program, so return BIGGEST_ALIGNMENT to avoid any
         later ICE.  */
      if (TREE_CONSTANT_OVERFLOW (exp))
        return BIGGEST_ALIGNMENT;
      else
        {
          /* Note: tree_low_cst is intentionally not used here,
             we don't care about the upper bits.  */
          c0 = TREE_INT_CST_LOW (exp);
          c0 &= -c0;
          return c0 ? c0 : BIGGEST_ALIGNMENT;
        }
      break;

    case PLUS_EXPR:  case MINUS_EXPR:  case MIN_EXPR:  case MAX_EXPR:
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      return MIN (c0, c1);

    case MULT_EXPR:
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      return c0 * c1;

    case ROUND_DIV_EXPR:  case TRUNC_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
      if (integer_pow2p (TREE_OPERAND (exp, 1))
          && host_integerp (TREE_OPERAND (exp, 1), 1))
        {
          c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
          c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
          return MAX (1, c0 / c1);
        }
      break;

    case NON_LVALUE_EXPR:  case NOP_EXPR:  case CONVERT_EXPR:
    case SAVE_EXPR: case WITH_RECORD_EXPR:
      return highest_pow2_factor (TREE_OPERAND (exp, 0));

    case COMPOUND_EXPR:
      return highest_pow2_factor (TREE_OPERAND (exp, 1));

    case COND_EXPR:
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
      return MIN (c0, c1);

    default:
      break;
    }

  return 1;
}
/* Return an object on the placeholder list that matches EXP, a
   PLACEHOLDER_EXPR.  An object "matches" if it is of the type of the
   PLACEHOLDER_EXPR or a pointer type to it.  For further information, see
   tree.def.  If no such object is found, return 0.  If PLIST is nonzero, it
   is a location which initially points to a starting location in the
   placeholder list (zero means start of the list) and where a pointer into
   the placeholder list at which the object is found is placed.  */
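/* For example, while expanding a size expression that contains a
   PLACEHOLDER_EXPR of type struct S, the innermost object of type
   struct S (or, failing that, a pointer to one, dereferenced via an
   INDIRECT_REF) found on the placeholder list is returned as the
   replacement.  */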
tree
find_placeholder (exp, plist)
     tree exp;
     tree *plist;
{
  tree type = TREE_TYPE (exp);
  tree placeholder_expr;

  for (placeholder_expr
       = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
       placeholder_expr != 0;
       placeholder_expr = TREE_CHAIN (placeholder_expr))
    {
      tree need_type = TYPE_MAIN_VARIANT (type);
      tree elt;

      /* Find the outermost reference that is of the type we want.  If none,
         see if any object has a type that is a pointer to the type we
         want.  */
5935 for (elt
= TREE_PURPOSE (placeholder_expr
); elt
!= 0;
5936 elt
= ((TREE_CODE (elt
) == COMPOUND_EXPR
5937 || TREE_CODE (elt
) == COND_EXPR
)
5938 ? TREE_OPERAND (elt
, 1)
5939 : (TREE_CODE_CLASS (TREE_CODE (elt
)) == 'r'
5940 || TREE_CODE_CLASS (TREE_CODE (elt
)) == '1'
5941 || TREE_CODE_CLASS (TREE_CODE (elt
)) == '2'
5942 || TREE_CODE_CLASS (TREE_CODE (elt
)) == 'e')
5943 ? TREE_OPERAND (elt
, 0) : 0))
5944 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt
)) == need_type
)
5947 *plist
= placeholder_expr
;
5951 for (elt
= TREE_PURPOSE (placeholder_expr
); elt
!= 0;
5953 = ((TREE_CODE (elt
) == COMPOUND_EXPR
5954 || TREE_CODE (elt
) == COND_EXPR
)
5955 ? TREE_OPERAND (elt
, 1)
5956 : (TREE_CODE_CLASS (TREE_CODE (elt
)) == 'r'
5957 || TREE_CODE_CLASS (TREE_CODE (elt
)) == '1'
5958 || TREE_CODE_CLASS (TREE_CODE (elt
)) == '2'
5959 || TREE_CODE_CLASS (TREE_CODE (elt
)) == 'e')
5960 ? TREE_OPERAND (elt
, 0) : 0))
5961 if (POINTER_TYPE_P (TREE_TYPE (elt
))
5962 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt
)))
5966 *plist
= placeholder_expr
;
5967 return build1 (INDIRECT_REF
, need_type
, elt
);
/* expand_expr: generate code for computing expression EXP.
   An rtx for the computed value is returned.  The value is never null.
   In the case of a void EXP, const0_rtx is returned.

   The value may be stored in TARGET if TARGET is nonzero.
   TARGET is just a suggestion; callers must assume that
   the rtx returned may not be the same as TARGET.

   If TARGET is CONST0_RTX, it means that the value will be ignored.

   If TMODE is not VOIDmode, it suggests generating the
   result in mode TMODE.  But this is done only when convenient.
   Otherwise, TMODE is ignored and the value generated in its natural mode.
   TMODE is just a suggestion; callers must assume that
   the rtx returned may not have mode TMODE.

   Note that TARGET may have neither TMODE nor MODE.  In that case, it
   probably will not be used.

   If MODIFIER is EXPAND_SUM then when EXP is an addition
   we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
   or a nest of (PLUS ...) and (MINUS ...) where the terms are
   products as above, or REG or MEM, or constant.
   Ordinarily in such cases we would output mul or add instructions
   and then return a pseudo reg containing the sum.

   EXPAND_INITIALIZER is much like EXPAND_SUM except that
   it also marks a label as absolutely required (it can't be dead).
   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
   This is used for outputting expressions used in initializers.

   EXPAND_CONST_ADDRESS says that it is okay to return a MEM
   with a constant address even if that address is not normally legitimate.
   EXPAND_INITIALIZER and EXPAND_SUM also have this effect.  */
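/* For example, expanding the address `&x[i]' with EXPAND_SUM or
   EXPAND_INITIALIZER may legitimately return a (plus ...) of a symbol
   and an index term rather than forcing the sum into a pseudo, whereas
   the normal modifier would emit the add and return a register.  */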
rtx
expand_expr (exp, target, tmode, modifier)
     tree exp;
     rtx target;
     enum machine_mode tmode;
     enum expand_modifier modifier;
{
  rtx op0, op1, temp;
  tree type = TREE_TYPE (exp);
  int unsignedp = TREE_UNSIGNED (type);
  enum machine_mode mode;
  enum tree_code code = TREE_CODE (exp);
  optab this_optab;
  rtx subtarget, original_target;
  int ignore;
  tree context;

  /* Handle ERROR_MARK before anybody tries to access its type.  */
  if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
    {
      op0 = CONST0_RTX (tmode);
      if (op0 != 0)
        return op0;
      return const0_rtx;
    }

  mode = TYPE_MODE (type);
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  subtarget = get_subtarget (target);
  original_target = target;
  ignore = (target == const0_rtx
            || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
                 || code == CONVERT_EXPR || code == REFERENCE_EXPR
                 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
                && TREE_CODE (type) == VOID_TYPE));
  /* If we are going to ignore this result, we need only do something
     if there is a side-effect somewhere in the expression.  If there
     is, short-circuit the most common cases here.  Note that we must
     not call expand_expr with anything but const0_rtx in case this
     is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */
6053 if (! TREE_SIDE_EFFECTS (exp
))
6056 /* Ensure we reference a volatile object even if value is ignored, but
6057 don't do this if all we are doing is taking its address. */
6058 if (TREE_THIS_VOLATILE (exp
)
6059 && TREE_CODE (exp
) != FUNCTION_DECL
6060 && mode
!= VOIDmode
&& mode
!= BLKmode
6061 && modifier
!= EXPAND_CONST_ADDRESS
)
6063 temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, modifier
);
6064 if (GET_CODE (temp
) == MEM
)
6065 temp
= copy_to_reg (temp
);
6069 if (TREE_CODE_CLASS (code
) == '1' || code
== COMPONENT_REF
6070 || code
== INDIRECT_REF
|| code
== BUFFER_REF
)
6071 return expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
,
6074 else if (TREE_CODE_CLASS (code
) == '2' || TREE_CODE_CLASS (code
) == '<'
6075 || code
== ARRAY_REF
|| code
== ARRAY_RANGE_REF
)
6077 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, modifier
);
6078 expand_expr (TREE_OPERAND (exp
, 1), const0_rtx
, VOIDmode
, modifier
);
6081 else if ((code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
)
6082 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 1)))
6083 /* If the second operand has no side effects, just evaluate
6085 return expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
,
6087 else if (code
== BIT_FIELD_REF
)
6089 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, modifier
);
6090 expand_expr (TREE_OPERAND (exp
, 1), const0_rtx
, VOIDmode
, modifier
);
6091 expand_expr (TREE_OPERAND (exp
, 2), const0_rtx
, VOIDmode
, modifier
);
#ifdef MAX_INTEGER_COMPUTATION_MODE
  /* Only check stuff here if the mode we want is different from the mode
     of the expression; if it's the same, check_max_integer_computation_mode
     will handle it.  Do we really need to check this stuff at all?  */

  if (target
      && GET_MODE (target) != mode
      && TREE_CODE (exp) != INTEGER_CST
      && TREE_CODE (exp) != PARM_DECL
      && TREE_CODE (exp) != ARRAY_REF
      && TREE_CODE (exp) != ARRAY_RANGE_REF
      && TREE_CODE (exp) != COMPONENT_REF
      && TREE_CODE (exp) != BIT_FIELD_REF
      && TREE_CODE (exp) != INDIRECT_REF
      && TREE_CODE (exp) != CALL_EXPR
      && TREE_CODE (exp) != VAR_DECL
      && TREE_CODE (exp) != RTL_EXPR)
    {
      enum machine_mode mode = GET_MODE (target);

      if (GET_MODE_CLASS (mode) == MODE_INT
          && mode > MAX_INTEGER_COMPUTATION_MODE)
        internal_error ("unsupported wide integer operation");
    }

  if (tmode != mode
      && TREE_CODE (exp) != INTEGER_CST
      && TREE_CODE (exp) != PARM_DECL
      && TREE_CODE (exp) != ARRAY_REF
      && TREE_CODE (exp) != ARRAY_RANGE_REF
      && TREE_CODE (exp) != COMPONENT_REF
      && TREE_CODE (exp) != BIT_FIELD_REF
      && TREE_CODE (exp) != INDIRECT_REF
      && TREE_CODE (exp) != VAR_DECL
      && TREE_CODE (exp) != CALL_EXPR
      && TREE_CODE (exp) != RTL_EXPR
      && GET_MODE_CLASS (tmode) == MODE_INT
      && tmode > MAX_INTEGER_COMPUTATION_MODE)
    internal_error ("unsupported wide integer operation");

  check_max_integer_computation_mode (exp);
#endif
  /* If we will do cse, generate all results into pseudo registers
     since 1) that allows cse to find more things
     and 2) otherwise cse could produce an insn the machine
     cannot support.  An exception is a CONSTRUCTOR into a multi-word
     MEM: that's much more likely to be most efficient into the MEM.  */

  if (! cse_not_expected && mode != BLKmode && target
      && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
      && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD))
    target = 0;
6156 tree function
= decl_function_context (exp
);
6157 /* Handle using a label in a containing function. */
6158 if (function
!= current_function_decl
6159 && function
!= inline_function_decl
&& function
!= 0)
6161 struct function
*p
= find_function_data (function
);
6162 p
->expr
->x_forced_labels
6163 = gen_rtx_EXPR_LIST (VOIDmode
, label_rtx (exp
),
6164 p
->expr
->x_forced_labels
);
6168 if (modifier
== EXPAND_INITIALIZER
)
6169 forced_labels
= gen_rtx_EXPR_LIST (VOIDmode
,
6174 temp
= gen_rtx_MEM (FUNCTION_MODE
,
6175 gen_rtx_LABEL_REF (Pmode
, label_rtx (exp
)));
6176 if (function
!= current_function_decl
6177 && function
!= inline_function_decl
&& function
!= 0)
6178 LABEL_REF_NONLOCAL_P (XEXP (temp
, 0)) = 1;
6183 if (DECL_RTL (exp
) == 0)
6185 error_with_decl (exp
, "prior parameter's size depends on `%s'");
6186 return CONST0_RTX (mode
);
6189 /* ... fall through ... */
    case VAR_DECL:
      /* If a static var's type was incomplete when the decl was written,
         but the type is complete now, lay out the decl now.  */
      if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
          && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
        {
          rtx value = DECL_RTL_IF_SET (exp);

          layout_decl (exp, 0);

          /* If the RTL was already set, update its mode and memory
             attributes.  */
          if (value != 0)
            {
              PUT_MODE (value, DECL_MODE (exp));
              SET_DECL_RTL (exp, 0);
              set_mem_attributes (value, exp, 1);
              SET_DECL_RTL (exp, value);
            }
        }

      /* ... fall through ... */
6216 if (DECL_RTL (exp
) == 0)
      /* Ensure variable marked as used even if it doesn't go through
         a parser.  If it hasn't been used yet, write out an external
         definition.  */
      if (! TREE_USED (exp))
        {
          assemble_external (exp);
          TREE_USED (exp) = 1;
        }
6228 /* Show we haven't gotten RTL for this yet. */
6231 /* Handle variables inherited from containing functions. */
6232 context
= decl_function_context (exp
);
6234 /* We treat inline_function_decl as an alias for the current function
6235 because that is the inline function whose vars, types, etc.
6236 are being merged into the current function.
6237 See expand_inline_function. */
6239 if (context
!= 0 && context
!= current_function_decl
6240 && context
!= inline_function_decl
6241 /* If var is static, we don't need a static chain to access it. */
6242 && ! (GET_CODE (DECL_RTL (exp
)) == MEM
6243 && CONSTANT_P (XEXP (DECL_RTL (exp
), 0))))
6247 /* Mark as non-local and addressable. */
6248 DECL_NONLOCAL (exp
) = 1;
6249 if (DECL_NO_STATIC_CHAIN (current_function_decl
))
6251 (*lang_hooks
.mark_addressable
) (exp
);
6252 if (GET_CODE (DECL_RTL (exp
)) != MEM
)
6254 addr
= XEXP (DECL_RTL (exp
), 0);
6255 if (GET_CODE (addr
) == MEM
)
6257 = replace_equiv_address (addr
,
6258 fix_lexical_addr (XEXP (addr
, 0), exp
));
6260 addr
= fix_lexical_addr (addr
, exp
);
6262 temp
= replace_equiv_address (DECL_RTL (exp
), addr
);
6265 /* This is the case of an array whose size is to be determined
6266 from its initializer, while the initializer is still being parsed.
6269 else if (GET_CODE (DECL_RTL (exp
)) == MEM
6270 && GET_CODE (XEXP (DECL_RTL (exp
), 0)) == REG
)
6271 temp
= validize_mem (DECL_RTL (exp
));
6273 /* If DECL_RTL is memory, we are in the normal case and either
6274 the address is not valid or it is not a register and -fforce-addr
6275 is specified, get the address into a register. */
6277 else if (GET_CODE (DECL_RTL (exp
)) == MEM
6278 && modifier
!= EXPAND_CONST_ADDRESS
6279 && modifier
!= EXPAND_SUM
6280 && modifier
!= EXPAND_INITIALIZER
6281 && (! memory_address_p (DECL_MODE (exp
),
6282 XEXP (DECL_RTL (exp
), 0))
6284 && GET_CODE (XEXP (DECL_RTL (exp
), 0)) != REG
)))
6285 temp
= replace_equiv_address (DECL_RTL (exp
),
6286 copy_rtx (XEXP (DECL_RTL (exp
), 0)));
6288 /* If we got something, return it. But first, set the alignment
6289 if the address is a register. */
6292 if (GET_CODE (temp
) == MEM
&& GET_CODE (XEXP (temp
, 0)) == REG
)
6293 mark_reg_pointer (XEXP (temp
, 0), DECL_ALIGN (exp
));
6298 /* If the mode of DECL_RTL does not match that of the decl, it
6299 must be a promoted value. We return a SUBREG of the wanted mode,
6300 but mark it so that we know that it was already extended. */
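         For example, on a target whose PROMOTE_MODE widens QImode locals
         to SImode registers, DECL_RTL is an SImode REG; what is returned
         is a QImode SUBREG of it with SUBREG_PROMOTED_VAR_P set, so later
         conversions can skip the redundant extension.  */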
6302 if (GET_CODE (DECL_RTL (exp
)) == REG
6303 && GET_MODE (DECL_RTL (exp
)) != DECL_MODE (exp
))
6305 /* Get the signedness used for this variable. Ensure we get the
6306 same mode we got when the variable was declared. */
6307 if (GET_MODE (DECL_RTL (exp
))
6308 != promote_mode (type
, DECL_MODE (exp
), &unsignedp
,
6309 (TREE_CODE (exp
) == RESULT_DECL
? 1 : 0)))
6312 temp
= gen_lowpart_SUBREG (mode
, DECL_RTL (exp
));
6313 SUBREG_PROMOTED_VAR_P (temp
) = 1;
6314 SUBREG_PROMOTED_UNSIGNED_SET (temp
, unsignedp
);
6318 return DECL_RTL (exp
);
    case INTEGER_CST:
      temp = immed_double_const (TREE_INT_CST_LOW (exp),
                                 TREE_INT_CST_HIGH (exp), mode);

      /* ??? If overflow is set, fold will have done an incomplete job,
         which can result in (plus xx (const_int 0)), which can get
         simplified by validate_replace_rtx during virtual register
         instantiation, which can result in unrecognizable insns.
         Avoid this by forcing all overflows into registers.  */
      if (TREE_CONSTANT_OVERFLOW (exp)
          && modifier != EXPAND_INITIALIZER)
        temp = force_reg (mode, temp);

      return temp;

    case CONST_DECL:
6336 return expand_expr (DECL_INITIAL (exp
), target
, VOIDmode
, 0);
    case REAL_CST:
      /* If optimized, generate immediate CONST_DOUBLE
         which will be turned into memory by reload if necessary.

         We used to force a register so that loop.c could see it.  But
         this does not allow gen_* patterns to perform optimizations with
         the constants.  It also produces two insns in cases like "x = 1.0;".
         On most machines, floating-point constants are not permitted in
         many insns, so we'd end up copying it to a register in any case.

         Now, we do the copying in expand_binop, if appropriate.  */
      return immed_real_const (exp);
    case COMPLEX_CST:
    case STRING_CST:
      if (! TREE_CST_RTL (exp))
        output_constant_def (exp, 1);

      /* TREE_CST_RTL probably contains a constant address.
         On RISC machines where a constant address isn't valid,
         make some insns to get that address into a register.  */
      if (GET_CODE (TREE_CST_RTL (exp)) == MEM
          && modifier != EXPAND_CONST_ADDRESS
          && modifier != EXPAND_INITIALIZER
          && modifier != EXPAND_SUM
          && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
              || (flag_force_addr
                  && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
        return replace_equiv_address (TREE_CST_RTL (exp),
                                      copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
      return TREE_CST_RTL (exp);
    case EXPR_WITH_FILE_LOCATION:
      {
        rtx to_return;
        const char *saved_input_filename = input_filename;
        int saved_lineno = lineno;
        input_filename = EXPR_WFL_FILENAME (exp);
        lineno = EXPR_WFL_LINENO (exp);
        if (EXPR_WFL_EMIT_LINE_NOTE (exp))
          emit_line_note (input_filename, lineno);
        /* Possibly avoid switching back and forth here.  */
        to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
        input_filename = saved_input_filename;
        lineno = saved_lineno;
        return to_return;
      }
6387 context
= decl_function_context (exp
);
6389 /* If this SAVE_EXPR was at global context, assume we are an
6390 initialization function and move it into our context. */
6392 SAVE_EXPR_CONTEXT (exp
) = current_function_decl
;
6394 /* We treat inline_function_decl as an alias for the current function
6395 because that is the inline function whose vars, types, etc.
6396 are being merged into the current function.
6397 See expand_inline_function. */
6398 if (context
== current_function_decl
|| context
== inline_function_decl
)
6401 /* If this is non-local, handle it. */
6404 /* The following call just exists to abort if the context is
6405 not of a containing function. */
6406 find_function_data (context
);
6408 temp
= SAVE_EXPR_RTL (exp
);
6409 if (temp
&& GET_CODE (temp
) == REG
)
6411 put_var_into_stack (exp
);
6412 temp
= SAVE_EXPR_RTL (exp
);
6414 if (temp
== 0 || GET_CODE (temp
) != MEM
)
6417 replace_equiv_address (temp
,
6418 fix_lexical_addr (XEXP (temp
, 0), exp
));
6420 if (SAVE_EXPR_RTL (exp
) == 0)
6422 if (mode
== VOIDmode
)
6425 temp
= assign_temp (build_qualified_type (type
,
6427 | TYPE_QUAL_CONST
)),
6430 SAVE_EXPR_RTL (exp
) = temp
;
6431 if (!optimize
&& GET_CODE (temp
) == REG
)
6432 save_expr_regs
= gen_rtx_EXPR_LIST (VOIDmode
, temp
,
6435 /* If the mode of TEMP does not match that of the expression, it
6436 must be a promoted value. We pass store_expr a SUBREG of the
6437 wanted mode but mark it so that we know that it was already
6438 extended. Note that `unsignedp' was modified above in
6441 if (GET_CODE (temp
) == REG
&& GET_MODE (temp
) != mode
)
6443 temp
= gen_lowpart_SUBREG (mode
, SAVE_EXPR_RTL (exp
));
6444 SUBREG_PROMOTED_VAR_P (temp
) = 1;
6445 SUBREG_PROMOTED_UNSIGNED_SET (temp
, unsignedp
);
6448 if (temp
== const0_rtx
)
6449 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, 0);
6451 store_expr (TREE_OPERAND (exp
, 0), temp
, 0);
6453 TREE_USED (exp
) = 1;
6456 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6457 must be a promoted value. We return a SUBREG of the wanted mode,
6458 but mark it so that we know that it was already extended. */
6460 if (GET_CODE (SAVE_EXPR_RTL (exp
)) == REG
6461 && GET_MODE (SAVE_EXPR_RTL (exp
)) != mode
)
6463 /* Compute the signedness and make the proper SUBREG. */
6464 promote_mode (type
, mode
, &unsignedp
, 0);
6465 temp
= gen_lowpart_SUBREG (mode
, SAVE_EXPR_RTL (exp
));
6466 SUBREG_PROMOTED_VAR_P (temp
) = 1;
6467 SUBREG_PROMOTED_UNSIGNED_SET (temp
, unsignedp
);
6471 return SAVE_EXPR_RTL (exp
);
6476 temp
= expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
, modifier
);
6477 TREE_OPERAND (exp
, 0)
6478 = (*lang_hooks
.unsave_expr_now
) (TREE_OPERAND (exp
, 0));
6482 case PLACEHOLDER_EXPR
:
6484 tree old_list
= placeholder_list
;
6485 tree placeholder_expr
= 0;
6487 exp
= find_placeholder (exp
, &placeholder_expr
);
6491 placeholder_list
= TREE_CHAIN (placeholder_expr
);
6492 temp
= expand_expr (exp
, original_target
, tmode
, modifier
);
6493 placeholder_list
= old_list
;
6497 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6500 case WITH_RECORD_EXPR
:
6501 /* Put the object on the placeholder list, expand our first operand,
6502 and pop the list. */
6503 placeholder_list
= tree_cons (TREE_OPERAND (exp
, 1), NULL_TREE
,
6505 target
= expand_expr (TREE_OPERAND (exp
, 0), original_target
, tmode
,
6507 placeholder_list
= TREE_CHAIN (placeholder_list
);
6511 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == LABEL_DECL
)
6512 expand_goto (TREE_OPERAND (exp
, 0));
6514 expand_computed_goto (TREE_OPERAND (exp
, 0));
6518 expand_exit_loop_if_false (NULL
,
6519 invert_truthvalue (TREE_OPERAND (exp
, 0)));
6522 case LABELED_BLOCK_EXPR
:
6523 if (LABELED_BLOCK_BODY (exp
))
6524 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp
), 0, 1);
6525 /* Should perhaps use expand_label, but this is simpler and safer. */
6526 do_pending_stack_adjust ();
6527 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp
)));
6530 case EXIT_BLOCK_EXPR
:
6531 if (EXIT_BLOCK_RETURN (exp
))
6532 sorry ("returned value in block_exit_expr");
6533 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp
)));
6538 expand_start_loop (1);
6539 expand_expr_stmt_value (TREE_OPERAND (exp
, 0), 0, 1);
6547 tree vars
= TREE_OPERAND (exp
, 0);
6548 int vars_need_expansion
= 0;
6550 /* Need to open a binding contour here because
6551 if there are any cleanups they must be contained here. */
6552 expand_start_bindings (2);
6554 /* Mark the corresponding BLOCK for output in its proper place. */
6555 if (TREE_OPERAND (exp
, 2) != 0
6556 && ! TREE_USED (TREE_OPERAND (exp
, 2)))
6557 (*lang_hooks
.decls
.insert_block
) (TREE_OPERAND (exp
, 2));
6559 /* If VARS have not yet been expanded, expand them now. */
6562 if (!DECL_RTL_SET_P (vars
))
6564 vars_need_expansion
= 1;
6567 expand_decl_init (vars
);
6568 vars
= TREE_CHAIN (vars
);
6571 temp
= expand_expr (TREE_OPERAND (exp
, 1), target
, tmode
, modifier
);
6573 expand_end_bindings (TREE_OPERAND (exp
, 0), 0, 0);
6579 if (RTL_EXPR_SEQUENCE (exp
))
6581 if (RTL_EXPR_SEQUENCE (exp
) == const0_rtx
)
6583 emit_insns (RTL_EXPR_SEQUENCE (exp
));
6584 RTL_EXPR_SEQUENCE (exp
) = const0_rtx
;
6586 preserve_rtl_expr_result (RTL_EXPR_RTL (exp
));
6587 free_temps_for_rtl_expr (exp
);
6588 return RTL_EXPR_RTL (exp
);
6591 /* If we don't need the result, just ensure we evaluate any
6597 for (elt
= CONSTRUCTOR_ELTS (exp
); elt
; elt
= TREE_CHAIN (elt
))
6598 expand_expr (TREE_VALUE (elt
), const0_rtx
, VOIDmode
, 0);
6603 /* All elts simple constants => refer to a constant in memory. But
6604 if this is a non-BLKmode mode, let it store a field at a time
6605 since that should make a CONST_INT or CONST_DOUBLE when we
6606 fold. Likewise, if we have a target we can use, it is best to
6607 store directly into the target unless the type is large enough
6608 that memcpy will be used. If we are making an initializer and
6609 all operands are constant, put it in memory as well. */
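	 (Roughly: a small non-BLKmode CONSTRUCTOR is built up a field at a
	 time so it can fold to a CONST_INT or CONST_DOUBLE, while a large,
	 addressable, or mostly-nonzero static aggregate is emitted once via
	 output_constant_def and then referenced as memory.)  */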
6610 else if ((TREE_STATIC (exp
)
6611 && ((mode
== BLKmode
6612 && ! (target
!= 0 && safe_from_p (target
, exp
, 1)))
6613 || TREE_ADDRESSABLE (exp
)
6614 || (host_integerp (TYPE_SIZE_UNIT (type
), 1)
6615 && (! MOVE_BY_PIECES_P
6616 (tree_low_cst (TYPE_SIZE_UNIT (type
), 1),
6618 && ! mostly_zeros_p (exp
))))
6619 || (modifier
== EXPAND_INITIALIZER
&& TREE_CONSTANT (exp
)))
6621 rtx constructor
= output_constant_def (exp
, 1);
6623 if (modifier
!= EXPAND_CONST_ADDRESS
6624 && modifier
!= EXPAND_INITIALIZER
6625 && modifier
!= EXPAND_SUM
)
6626 constructor
= validize_mem (constructor
);
6632 /* Handle calls that pass values in multiple non-contiguous
6633 locations. The Irix 6 ABI has examples of this. */
6634 if (target
== 0 || ! safe_from_p (target
, exp
, 1)
6635 || GET_CODE (target
) == PARALLEL
)
6637 = assign_temp (build_qualified_type (type
,
6639 | (TREE_READONLY (exp
)
6640 * TYPE_QUAL_CONST
))),
6641 0, TREE_ADDRESSABLE (exp
), 1);
6643 store_constructor (exp
, target
, 0,
6644 int_size_in_bytes (TREE_TYPE (exp
)));
        tree exp1 = TREE_OPERAND (exp, 0);
        tree index;
        tree string = string_constant (exp1, &index);

        /* Try to optimize reads from const strings.  */
        if (string
            && TREE_CODE (string) == STRING_CST
            && TREE_CODE (index) == INTEGER_CST
            && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
            && GET_MODE_CLASS (mode) == MODE_INT
            && GET_MODE_SIZE (mode) == 1
            && modifier != EXPAND_WRITE)
          return gen_int_mode (TREE_STRING_POINTER (string)
                               [TREE_INT_CST_LOW (index)], mode);

        op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
        op0 = memory_address (mode, op0);
        temp = gen_rtx_MEM (mode, op0);
        set_mem_attributes (temp, exp, 0);

        /* If we are writing to this object and its type is a record with
           readonly fields, we must mark it as readonly so it will
           conflict with readonly references to those fields.  */
        if (modifier == EXPAND_WRITE && readonly_fields_p (type))
          RTX_UNCHANGING_P (temp) = 1;
6680 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 0))) != ARRAY_TYPE
)
        tree array = TREE_OPERAND (exp, 0);
        tree domain = TYPE_DOMAIN (TREE_TYPE (array));
        tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
        tree index = convert (sizetype, TREE_OPERAND (exp, 1));

        /* Optimize the special-case of a zero lower bound.

           We convert the low_bound to sizetype to avoid some problems
           with constant folding.  (E.g. suppose the lower bound is 1,
           and its mode is QI.  Without the conversion,  (ARRAY
           +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
           +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */

        if (! integer_zerop (low_bound))
          index = size_diffop (index, convert (sizetype, low_bound));
6701 /* Fold an expression like: "foo"[2].
6702 This is not done in fold so it won't happen inside &.
6703 Don't fold if this is for wide characters since it's too
6704 difficult to do correctly and this is a very rare case. */
6706 if (modifier
!= EXPAND_CONST_ADDRESS
&& modifier
!= EXPAND_INITIALIZER
6707 && TREE_CODE (array
) == STRING_CST
6708 && TREE_CODE (index
) == INTEGER_CST
6709 && compare_tree_int (index
, TREE_STRING_LENGTH (array
)) < 0
6710 && GET_MODE_CLASS (mode
) == MODE_INT
6711 && GET_MODE_SIZE (mode
) == 1)
6712 return gen_int_mode (TREE_STRING_POINTER (array
)
6713 [TREE_INT_CST_LOW (index
)], mode
);
6715 /* If this is a constant index into a constant array,
6716 just get the value from the array. Handle both the cases when
6717 we have an explicit constructor and when our operand is a variable
6718 that was declared const. */
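	   For instance, given "static const int tbl[] = {1, 2, 3};", a use
	   of tbl[1] can be expanded straight from the CONSTRUCTOR (or the
	   decl's DECL_INITIAL) without ever addressing the array in memory.  */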
6720 if (modifier
!= EXPAND_CONST_ADDRESS
&& modifier
!= EXPAND_INITIALIZER
6721 && TREE_CODE (array
) == CONSTRUCTOR
&& ! TREE_SIDE_EFFECTS (array
)
6722 && TREE_CODE (index
) == INTEGER_CST
6723 && 0 > compare_tree_int (index
,
6724 list_length (CONSTRUCTOR_ELTS
6725 (TREE_OPERAND (exp
, 0)))))
6729 for (elem
= CONSTRUCTOR_ELTS (TREE_OPERAND (exp
, 0)),
6730 i
= TREE_INT_CST_LOW (index
);
6731 elem
!= 0 && i
!= 0; i
--, elem
= TREE_CHAIN (elem
))
6735 return expand_expr (fold (TREE_VALUE (elem
)), target
, tmode
,
6739 else if (optimize
>= 1
6740 && modifier
!= EXPAND_CONST_ADDRESS
6741 && modifier
!= EXPAND_INITIALIZER
6742 && TREE_READONLY (array
) && ! TREE_SIDE_EFFECTS (array
)
6743 && TREE_CODE (array
) == VAR_DECL
&& DECL_INITIAL (array
)
6744 && TREE_CODE (DECL_INITIAL (array
)) != ERROR_MARK
)
6746 if (TREE_CODE (index
) == INTEGER_CST
)
6748 tree init
= DECL_INITIAL (array
);
6750 if (TREE_CODE (init
) == CONSTRUCTOR
)
6754 for (elem
= CONSTRUCTOR_ELTS (init
);
6756 && !tree_int_cst_equal (TREE_PURPOSE (elem
), index
));
6757 elem
= TREE_CHAIN (elem
))
6760 if (elem
&& !TREE_SIDE_EFFECTS (TREE_VALUE (elem
)))
6761 return expand_expr (fold (TREE_VALUE (elem
)), target
,
6764 else if (TREE_CODE (init
) == STRING_CST
6765 && 0 > compare_tree_int (index
,
6766 TREE_STRING_LENGTH (init
)))
6768 tree type
= TREE_TYPE (TREE_TYPE (init
));
6769 enum machine_mode mode
= TYPE_MODE (type
);
6771 if (GET_MODE_CLASS (mode
) == MODE_INT
6772 && GET_MODE_SIZE (mode
) == 1)
6773 return gen_int_mode (TREE_STRING_POINTER (init
)
6774 [TREE_INT_CST_LOW (index
)], mode
);
6783 case ARRAY_RANGE_REF
:
6784 /* If the operand is a CONSTRUCTOR, we can just extract the
6785 appropriate field if it is present. Don't do this if we have
6786 already written the data since we want to refer to that copy
6787 and varasm.c assumes that's what we'll do. */
6788 if (code
== COMPONENT_REF
6789 && TREE_CODE (TREE_OPERAND (exp
, 0)) == CONSTRUCTOR
6790 && TREE_CST_RTL (TREE_OPERAND (exp
, 0)) == 0)
6794 for (elt
= CONSTRUCTOR_ELTS (TREE_OPERAND (exp
, 0)); elt
;
6795 elt
= TREE_CHAIN (elt
))
6796 if (TREE_PURPOSE (elt
) == TREE_OPERAND (exp
, 1)
6797 /* We can normally use the value of the field in the
6798 CONSTRUCTOR. However, if this is a bitfield in
6799 an integral mode that we can fit in a HOST_WIDE_INT,
6800 we must mask only the number of bits in the bitfield,
6801 since this is done implicitly by the constructor. If
6802 the bitfield does not meet either of those conditions,
6803 we can't do this optimization. */
6804 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt
))
6805 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt
)))
6807 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt
)))
6808 <= HOST_BITS_PER_WIDE_INT
))))
6810 op0
= expand_expr (TREE_VALUE (elt
), target
, tmode
, modifier
);
6811 if (DECL_BIT_FIELD (TREE_PURPOSE (elt
)))
6813 HOST_WIDE_INT bitsize
6814 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt
)));
6815 enum machine_mode imode
6816 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt
)));
6818 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt
))))
6820 op1
= GEN_INT (((HOST_WIDE_INT
) 1 << bitsize
) - 1);
6821 op0
= expand_and (imode
, op0
, op1
, target
);
6826 = build_int_2 (GET_MODE_BITSIZE (imode
) - bitsize
,
6829 op0
= expand_shift (LSHIFT_EXPR
, imode
, op0
, count
,
6831 op0
= expand_shift (RSHIFT_EXPR
, imode
, op0
, count
,
6841 enum machine_mode mode1
;
6842 HOST_WIDE_INT bitsize
, bitpos
;
6845 tree tem
= get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
,
6846 &mode1
, &unsignedp
, &volatilep
);
6849 /* If we got back the original object, something is wrong. Perhaps
6850 we are evaluating an expression too early. In any event, don't
6851 infinitely recurse. */
6855 /* If TEM's type is a union of variable size, pass TARGET to the inner
6856 computation, since it will need a temporary and TARGET is known
6857 to have to do. This occurs in unchecked conversion in Ada. */
6861 (TREE_CODE (TREE_TYPE (tem
)) == UNION_TYPE
6862 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem
)))
6864 ? target
: NULL_RTX
),
6866 (modifier
== EXPAND_INITIALIZER
6867 || modifier
== EXPAND_CONST_ADDRESS
)
6868 ? modifier
: EXPAND_NORMAL
);
6870 /* If this is a constant, put it into a register if it is a
6871 legitimate constant and OFFSET is 0 and memory if it isn't. */
6872 if (CONSTANT_P (op0
))
6874 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (tem
));
6875 if (mode
!= BLKmode
&& LEGITIMATE_CONSTANT_P (op0
)
6877 op0
= force_reg (mode
, op0
);
6879 op0
= validize_mem (force_const_mem (mode
, op0
));
6884 rtx offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, EXPAND_SUM
);
6886 /* If this object is in a register, put it into memory.
6887 This case can't occur in C, but can in Ada if we have
6888 unchecked conversion of an expression from a scalar type to
6889 an array or record type. */
6890 if (GET_CODE (op0
) == REG
|| GET_CODE (op0
) == SUBREG
6891 || GET_CODE (op0
) == CONCAT
|| GET_CODE (op0
) == ADDRESSOF
)
6893 /* If the operand is a SAVE_EXPR, we can deal with this by
6894 forcing the SAVE_EXPR into memory. */
6895 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == SAVE_EXPR
)
6897 put_var_into_stack (TREE_OPERAND (exp
, 0));
6898 op0
= SAVE_EXPR_RTL (TREE_OPERAND (exp
, 0));
6903 = build_qualified_type (TREE_TYPE (tem
),
6904 (TYPE_QUALS (TREE_TYPE (tem
))
6905 | TYPE_QUAL_CONST
));
6906 rtx memloc
= assign_temp (nt
, 1, 1, 1);
6908 emit_move_insn (memloc
, op0
);
6913 if (GET_CODE (op0
) != MEM
)
6916 if (GET_MODE (offset_rtx
) != ptr_mode
)
6917 offset_rtx
= convert_to_mode (ptr_mode
, offset_rtx
, 0);
6919 #ifdef POINTERS_EXTEND_UNSIGNED
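	    /* Addresses are always Pmode; on targets where Pmode is wider
	       than ptr_mode, the offset computed above must be widened
	       before it can take part in an address calculation.  */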
6920 if (GET_MODE (offset_rtx
) != Pmode
)
6921 offset_rtx
= convert_memory_address (Pmode
, offset_rtx
);
6924 /* A constant address in OP0 can have VOIDmode, we must not try
6925 to call force_reg for that case. Avoid that case. */
6926 if (GET_CODE (op0
) == MEM
6927 && GET_MODE (op0
) == BLKmode
6928 && GET_MODE (XEXP (op0
, 0)) != VOIDmode
6930 && (bitpos
% bitsize
) == 0
6931 && (bitsize
% GET_MODE_ALIGNMENT (mode1
)) == 0
6932 && MEM_ALIGN (op0
) == GET_MODE_ALIGNMENT (mode1
))
6934 op0
= adjust_address (op0
, mode1
, bitpos
/ BITS_PER_UNIT
);
6938 op0
= offset_address (op0
, offset_rtx
,
6939 highest_pow2_factor (offset
));
6942 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
6943 record its alignment as BIGGEST_ALIGNMENT. */
6944 if (GET_CODE (op0
) == MEM
&& bitpos
== 0 && offset
!= 0
6945 && is_aligning_offset (offset
, tem
))
6946 set_mem_align (op0
, BIGGEST_ALIGNMENT
);
6948 /* Don't forget about volatility even if this is a bitfield. */
6949 if (GET_CODE (op0
) == MEM
&& volatilep
&& ! MEM_VOLATILE_P (op0
))
6951 if (op0
== orig_op0
)
6952 op0
= copy_rtx (op0
);
6954 MEM_VOLATILE_P (op0
) = 1;
6957 /* In cases where an aligned union has an unaligned object
6958 as a field, we might be extracting a BLKmode value from
6959 an integer-mode (e.g., SImode) object. Handle this case
6960 by doing the extract into an object as wide as the field
6961 (which we know to be the width of a basic mode), then
6962 storing into memory, and changing the mode to BLKmode. */
6963 if (mode1
== VOIDmode
6964 || GET_CODE (op0
) == REG
|| GET_CODE (op0
) == SUBREG
6965 || (mode1
!= BLKmode
&& ! direct_load
[(int) mode1
]
6966 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_INT
6967 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_FLOAT
6968 && modifier
!= EXPAND_CONST_ADDRESS
6969 && modifier
!= EXPAND_INITIALIZER
)
6970 /* If the field isn't aligned enough to fetch as a memref,
6971 fetch it as a bit field. */
6972 || (mode1
!= BLKmode
6973 && SLOW_UNALIGNED_ACCESS (mode1
, MEM_ALIGN (op0
))
6974 && ((TYPE_ALIGN (TREE_TYPE (tem
))
6975 < GET_MODE_ALIGNMENT (mode
))
6976 || (bitpos
% GET_MODE_ALIGNMENT (mode
) != 0)))
6977 /* If the type and the field are a constant size and the
6978 size of the type isn't the same size as the bitfield,
6979 we must use bitfield operations. */
6981 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
)))
6983 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp
)),
6986 enum machine_mode ext_mode
= mode
;
6988 if (ext_mode
== BLKmode
6989 && ! (target
!= 0 && GET_CODE (op0
) == MEM
6990 && GET_CODE (target
) == MEM
6991 && bitpos
% BITS_PER_UNIT
== 0))
6992 ext_mode
= mode_for_size (bitsize
, MODE_INT
, 1);
6994 if (ext_mode
== BLKmode
)
6996 /* In this case, BITPOS must start at a byte boundary and
6997 TARGET, if specified, must be a MEM. */
6998 if (GET_CODE (op0
) != MEM
6999 || (target
!= 0 && GET_CODE (target
) != MEM
)
7000 || bitpos
% BITS_PER_UNIT
!= 0)
7003 op0
= adjust_address (op0
, VOIDmode
, bitpos
/ BITS_PER_UNIT
);
7005 target
= assign_temp (type
, 0, 1, 1);
7007 emit_block_move (target
, op0
,
7008 GEN_INT ((bitsize
+ BITS_PER_UNIT
- 1)
7014 op0
= validize_mem (op0
);
7016 if (GET_CODE (op0
) == MEM
&& GET_CODE (XEXP (op0
, 0)) == REG
)
7017 mark_reg_pointer (XEXP (op0
, 0), MEM_ALIGN (op0
));
7019 op0
= extract_bit_field (op0
, bitsize
, bitpos
,
7020 unsignedp
, target
, ext_mode
, ext_mode
,
7021 int_size_in_bytes (TREE_TYPE (tem
)));
7023 /* If the result is a record type and BITSIZE is narrower than
7024 the mode of OP0, an integral mode, and this is a big endian
7025 machine, we must put the field into the high-order bits. */
7026 if (TREE_CODE (type
) == RECORD_TYPE
&& BYTES_BIG_ENDIAN
7027 && GET_MODE_CLASS (GET_MODE (op0
)) == MODE_INT
7028 && bitsize
< (HOST_WIDE_INT
) GET_MODE_BITSIZE (GET_MODE (op0
)))
7029 op0
= expand_shift (LSHIFT_EXPR
, GET_MODE (op0
), op0
,
7030 size_int (GET_MODE_BITSIZE (GET_MODE (op0
))
7034 if (mode
== BLKmode
)
7036 rtx
new = assign_temp (build_qualified_type
7037 ((*lang_hooks
.types
.type_for_mode
)
7039 TYPE_QUAL_CONST
), 0, 1, 1);
7041 emit_move_insn (new, op0
);
7042 op0
= copy_rtx (new);
7043 PUT_MODE (op0
, BLKmode
);
7044 set_mem_attributes (op0
, exp
, 1);
7050 /* If the result is BLKmode, use that to access the object
7052 if (mode
== BLKmode
)
7055 /* Get a reference to just this component. */
7056 if (modifier
== EXPAND_CONST_ADDRESS
7057 || modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
7058 op0
= adjust_address_nv (op0
, mode1
, bitpos
/ BITS_PER_UNIT
);
7060 op0
= adjust_address (op0
, mode1
, bitpos
/ BITS_PER_UNIT
);
7062 if (op0
== orig_op0
)
7063 op0
= copy_rtx (op0
);
7065 set_mem_attributes (op0
, exp
, 0);
7066 if (GET_CODE (XEXP (op0
, 0)) == REG
)
7067 mark_reg_pointer (XEXP (op0
, 0), MEM_ALIGN (op0
));
7069 MEM_VOLATILE_P (op0
) |= volatilep
;
7070 if (mode
== mode1
|| mode1
== BLKmode
|| mode1
== tmode
7071 || modifier
== EXPAND_CONST_ADDRESS
7072 || modifier
== EXPAND_INITIALIZER
)
7074 else if (target
== 0)
7075 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
7077 convert_move (target
, op0
, unsignedp
);
7083 rtx insn
, before
= get_last_insn (), vtbl_ref
;
7085 /* Evaluate the interior expression. */
7086 subtarget
= expand_expr (TREE_OPERAND (exp
, 0), target
,
7089 /* Get or create an instruction off which to hang a note. */
7090 if (REG_P (subtarget
))
7093 insn
= get_last_insn ();
7096 if (! INSN_P (insn
))
7097 insn
= prev_nonnote_insn (insn
);
7101 target
= gen_reg_rtx (GET_MODE (subtarget
));
7102 insn
= emit_move_insn (target
, subtarget
);
7105 /* Collect the data for the note. */
7106 vtbl_ref
= XEXP (DECL_RTL (TREE_OPERAND (exp
, 1)), 0);
7107 vtbl_ref
= plus_constant (vtbl_ref
,
7108 tree_low_cst (TREE_OPERAND (exp
, 2), 0));
7109 /* Discard the initial CONST that was added. */
7110 vtbl_ref
= XEXP (vtbl_ref
, 0);
7113 = gen_rtx_EXPR_LIST (REG_VTABLE_REF
, vtbl_ref
, REG_NOTES (insn
));
7118 /* Intended for a reference to a buffer of a file-object in Pascal.
7119 But it's not certain that a special tree code will really be
7120 necessary for these. INDIRECT_REF might work for them. */
      /* Pascal set IN expression.

         Algorithm:
             rlo       = set_low - (set_low%bits_per_word);
             the_word  = set [ (index - rlo)/bits_per_word ];
             bit_index = index % bits_per_word;
             bitmask   = 1 << bit_index;
             return !!(the_word & bitmask);  */
      {
        tree set = TREE_OPERAND (exp, 0);
        tree index = TREE_OPERAND (exp, 1);
        int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
        tree set_type = TREE_TYPE (set);
        tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
        tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
        rtx index_val = expand_expr (index, 0, VOIDmode, 0);
        rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
        rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
        rtx setval = expand_expr (set, 0, VOIDmode, 0);
        rtx setaddr = XEXP (setval, 0);
        enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
        rtx rlow;
        rtx diff, quo, rem, addr, bit, result;
7150 /* If domain is empty, answer is no. Likewise if index is constant
7151 and out of bounds. */
7152 if (((TREE_CODE (set_high_bound
) == INTEGER_CST
7153 && TREE_CODE (set_low_bound
) == INTEGER_CST
7154 && tree_int_cst_lt (set_high_bound
, set_low_bound
))
7155 || (TREE_CODE (index
) == INTEGER_CST
7156 && TREE_CODE (set_low_bound
) == INTEGER_CST
7157 && tree_int_cst_lt (index
, set_low_bound
))
7158 || (TREE_CODE (set_high_bound
) == INTEGER_CST
7159 && TREE_CODE (index
) == INTEGER_CST
7160 && tree_int_cst_lt (set_high_bound
, index
))))
7164 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
7166 /* If we get here, we have to generate the code for both cases
7167 (in range and out of range). */
7169 op0
= gen_label_rtx ();
7170 op1
= gen_label_rtx ();
7172 if (! (GET_CODE (index_val
) == CONST_INT
7173 && GET_CODE (lo_r
) == CONST_INT
))
7174 emit_cmp_and_jump_insns (index_val
, lo_r
, LT
, NULL_RTX
,
7175 GET_MODE (index_val
), iunsignedp
, op1
);
7177 if (! (GET_CODE (index_val
) == CONST_INT
7178 && GET_CODE (hi_r
) == CONST_INT
))
7179 emit_cmp_and_jump_insns (index_val
, hi_r
, GT
, NULL_RTX
,
7180 GET_MODE (index_val
), iunsignedp
, op1
);
7182 /* Calculate the element number of bit zero in the first word
7184 if (GET_CODE (lo_r
) == CONST_INT
)
7185 rlow
= GEN_INT (INTVAL (lo_r
)
7186 & ~((HOST_WIDE_INT
) 1 << BITS_PER_UNIT
));
7188 rlow
= expand_binop (index_mode
, and_optab
, lo_r
,
7189 GEN_INT (~((HOST_WIDE_INT
) 1 << BITS_PER_UNIT
)),
7190 NULL_RTX
, iunsignedp
, OPTAB_LIB_WIDEN
);
7192 diff
= expand_binop (index_mode
, sub_optab
, index_val
, rlow
,
7193 NULL_RTX
, iunsignedp
, OPTAB_LIB_WIDEN
);
7195 quo
= expand_divmod (0, TRUNC_DIV_EXPR
, index_mode
, diff
,
7196 GEN_INT (BITS_PER_UNIT
), NULL_RTX
, iunsignedp
);
7197 rem
= expand_divmod (1, TRUNC_MOD_EXPR
, index_mode
, index_val
,
7198 GEN_INT (BITS_PER_UNIT
), NULL_RTX
, iunsignedp
);
7200 addr
= memory_address (byte_mode
,
7201 expand_binop (index_mode
, add_optab
, diff
,
7202 setaddr
, NULL_RTX
, iunsignedp
,
7205 /* Extract the bit we want to examine. */
7206 bit
= expand_shift (RSHIFT_EXPR
, byte_mode
,
7207 gen_rtx_MEM (byte_mode
, addr
),
7208 make_tree (TREE_TYPE (index
), rem
),
7210 result
= expand_binop (byte_mode
, and_optab
, bit
, const1_rtx
,
7211 GET_MODE (target
) == byte_mode
? target
: 0,
7212 1, OPTAB_LIB_WIDEN
);
7214 if (result
!= target
)
7215 convert_move (target
, result
, 1);
7217 /* Output the code to handle the out-of-range case. */
7220 emit_move_insn (target
, const0_rtx
);
7225 case WITH_CLEANUP_EXPR
:
7226 if (WITH_CLEANUP_EXPR_RTL (exp
) == 0)
7228 WITH_CLEANUP_EXPR_RTL (exp
)
7229 = expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
, modifier
);
7230 expand_decl_cleanup_eh (NULL_TREE
, TREE_OPERAND (exp
, 1),
7231 CLEANUP_EH_ONLY (exp
));
7233 /* That's it for this cleanup. */
7234 TREE_OPERAND (exp
, 1) = 0;
7236 return WITH_CLEANUP_EXPR_RTL (exp
);
7238 case CLEANUP_POINT_EXPR
:
7240 /* Start a new binding layer that will keep track of all cleanup
7241 actions to be performed. */
7242 expand_start_bindings (2);
7244 target_temp_slot_level
= temp_slot_level
;
7246 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
, modifier
);
7247 /* If we're going to use this value, load it up now. */
7249 op0
= force_not_mem (op0
);
7250 preserve_temp_slots (op0
);
7251 expand_end_bindings (NULL_TREE
, 0, 0);
7256 /* Check for a built-in function. */
7257 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ADDR_EXPR
7258 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))
7260 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
7262 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))
7263 == BUILT_IN_FRONTEND
)
7264 return (*lang_hooks
.expand_expr
)
7265 (exp
, original_target
, tmode
, modifier
);
7267 return expand_builtin (exp
, target
, subtarget
, tmode
, ignore
);
7270 return expand_call (exp
, target
, ignore
);
7272 case NON_LVALUE_EXPR
:
7275 case REFERENCE_EXPR
:
7276 if (TREE_OPERAND (exp
, 0) == error_mark_node
)
7279 if (TREE_CODE (type
) == UNION_TYPE
)
7281 tree valtype
= TREE_TYPE (TREE_OPERAND (exp
, 0));
7283 /* If both input and output are BLKmode, this conversion isn't doing
7284 anything except possibly changing memory attribute. */
7285 if (mode
== BLKmode
&& TYPE_MODE (valtype
) == BLKmode
)
7287 rtx result
= expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
,
7290 result
= copy_rtx (result
);
7291 set_mem_attributes (result
, exp
, 0);
7296 target
= assign_temp (type
, 0, 1, 1);
7298 if (GET_CODE (target
) == MEM
)
7299 /* Store data into beginning of memory target. */
7300 store_expr (TREE_OPERAND (exp
, 0),
7301 adjust_address (target
, TYPE_MODE (valtype
), 0), 0);
7303 else if (GET_CODE (target
) == REG
)
7304 /* Store this field into a union of the proper type. */
7305 store_field (target
,
7306 MIN ((int_size_in_bytes (TREE_TYPE
7307 (TREE_OPERAND (exp
, 0)))
7309 (HOST_WIDE_INT
) GET_MODE_BITSIZE (mode
)),
7310 0, TYPE_MODE (valtype
), TREE_OPERAND (exp
, 0),
7311 VOIDmode
, 0, type
, 0);
7315 /* Return the entire union. */
7319 if (mode
== TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))
7321 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, VOIDmode
,
7324 /* If the signedness of the conversion differs and OP0 is
7325 a promoted SUBREG, clear that indication since we now
7326 have to do the proper extension. */
7327 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))) != unsignedp
7328 && GET_CODE (op0
) == SUBREG
)
7329 SUBREG_PROMOTED_VAR_P (op0
) = 0;
7334 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, mode
, modifier
);
7335 if (GET_MODE (op0
) == mode
)
7338 /* If OP0 is a constant, just convert it into the proper mode. */
7339 if (CONSTANT_P (op0
))
7341 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
7342 enum machine_mode inner_mode
= TYPE_MODE (inner_type
);
7344 if (modifier
== EXPAND_INITIALIZER
)
7345 return simplify_gen_subreg (mode
, op0
, inner_mode
,
7346 subreg_lowpart_offset (mode
,
7349 return convert_modes (mode
, inner_mode
, op0
,
7350 TREE_UNSIGNED (inner_type
));
7353 if (modifier
== EXPAND_INITIALIZER
)
7354 return gen_rtx_fmt_e (unsignedp
? ZERO_EXTEND
: SIGN_EXTEND
, mode
, op0
);
7358 convert_to_mode (mode
, op0
,
7359 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
7361 convert_move (target
, op0
,
7362 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
7365 case VIEW_CONVERT_EXPR
:
7366 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, mode
, modifier
);
7368 /* If the input and output modes are both the same, we are done.
7369 Otherwise, if neither mode is BLKmode and both are within a word, we
7370 can use gen_lowpart. If neither is true, make sure the operand is
7371 in memory and convert the MEM to the new mode. */
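	 (A rough illustration: viewing a 32-bit float as a 32-bit int can
	 usually be done with gen_lowpart on the value, whereas
	 reinterpreting a BLKmode aggregate has to go through a stack
	 temporary accessed in the new mode.)  */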
7372 if (TYPE_MODE (type
) == GET_MODE (op0
))
7374 else if (TYPE_MODE (type
) != BLKmode
&& GET_MODE (op0
) != BLKmode
7375 && GET_MODE_SIZE (TYPE_MODE (type
)) <= UNITS_PER_WORD
7376 && GET_MODE_SIZE (GET_MODE (op0
)) <= UNITS_PER_WORD
)
7377 op0
= gen_lowpart (TYPE_MODE (type
), op0
);
7378 else if (GET_CODE (op0
) != MEM
)
          /* If the operand is not a MEM, force it into memory.  Since we
             are going to be changing the mode of the MEM, don't call
             force_const_mem for constants because we don't allow pool
             constants to change mode.  */
7384 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
7386 if (TREE_ADDRESSABLE (exp
))
7389 if (target
== 0 || GET_MODE (target
) != TYPE_MODE (inner_type
))
7391 = assign_stack_temp_for_type
7392 (TYPE_MODE (inner_type
),
7393 GET_MODE_SIZE (TYPE_MODE (inner_type
)), 0, inner_type
);
7395 emit_move_insn (target
, op0
);
7399 /* At this point, OP0 is in the correct mode. If the output type is such
7400 that the operand is known to be aligned, indicate that it is.
7401 Otherwise, we need only be concerned about alignment for non-BLKmode
7403 if (GET_CODE (op0
) == MEM
)
7405 op0
= copy_rtx (op0
);
7407 if (TYPE_ALIGN_OK (type
))
7408 set_mem_align (op0
, MAX (MEM_ALIGN (op0
), TYPE_ALIGN (type
)));
7409 else if (TYPE_MODE (type
) != BLKmode
&& STRICT_ALIGNMENT
7410 && MEM_ALIGN (op0
) < GET_MODE_ALIGNMENT (TYPE_MODE (type
)))
7412 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
7413 HOST_WIDE_INT temp_size
7414 = MAX (int_size_in_bytes (inner_type
),
7415 (HOST_WIDE_INT
) GET_MODE_SIZE (TYPE_MODE (type
)));
7416 rtx
new = assign_stack_temp_for_type (TYPE_MODE (type
),
7417 temp_size
, 0, type
);
7418 rtx new_with_op0_mode
= adjust_address (new, GET_MODE (op0
), 0);
7420 if (TREE_ADDRESSABLE (exp
))
7423 if (GET_MODE (op0
) == BLKmode
)
7424 emit_block_move (new_with_op0_mode
, op0
,
7425 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type
))));
7427 emit_move_insn (new_with_op0_mode
, op0
);
7432 op0
= adjust_address (op0
, TYPE_MODE (type
), 0);
7438 /* We come here from MINUS_EXPR when the second operand is a
7441 this_optab
= ! unsignedp
&& flag_trapv
7442 && (GET_MODE_CLASS (mode
) == MODE_INT
)
7443 ? addv_optab
: add_optab
;
7445 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7446 something else, make sure we add the register to the constant and
7447 then to the other thing. This case can occur during strength
7448 reduction and doing it this way will produce better code if the
7449 frame pointer or argument pointer is eliminated.
7451 fold-const.c will ensure that the constant is always in the inner
7452 PLUS_EXPR, so the only case we need to do anything about is if
7453 sp, ap, or fp is our second argument, in which case we must swap
7454 the innermost first argument and our second argument. */
7456 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == PLUS_EXPR
7457 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 1)) == INTEGER_CST
7458 && TREE_CODE (TREE_OPERAND (exp
, 1)) == RTL_EXPR
7459 && (RTL_EXPR_RTL (TREE_OPERAND (exp
, 1)) == frame_pointer_rtx
7460 || RTL_EXPR_RTL (TREE_OPERAND (exp
, 1)) == stack_pointer_rtx
7461 || RTL_EXPR_RTL (TREE_OPERAND (exp
, 1)) == arg_pointer_rtx
))
7463 tree t
= TREE_OPERAND (exp
, 1);
7465 TREE_OPERAND (exp
, 1) = TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
7466 TREE_OPERAND (TREE_OPERAND (exp
, 0), 0) = t
;
7469 /* If the result is to be ptr_mode and we are adding an integer to
7470 something, we might be forming a constant. So try to use
7471 plus_constant. If it produces a sum and we can't accept it,
7472 use force_operand. This allows P = &ARR[const] to generate
7473 efficient code on machines where a SYMBOL_REF is not a valid
7476 If this is an EXPAND_SUM call, always return the sum. */
7477 if (modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
7478 || (mode
== ptr_mode
&& (unsignedp
|| ! flag_trapv
)))
7480 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == INTEGER_CST
7481 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
7482 && TREE_CONSTANT (TREE_OPERAND (exp
, 1)))
7486 op1
= expand_expr (TREE_OPERAND (exp
, 1), subtarget
, VOIDmode
,
7488 /* Use immed_double_const to ensure that the constant is
7489 truncated according to the mode of OP1, then sign extended
7490 to a HOST_WIDE_INT. Using the constant directly can result
7491 in non-canonical RTL in a 64x32 cross compile. */
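	     For instance, when a 64-bit host targets a 32-bit machine, an
	     SImode constant with the high bit set must appear as a
	     sign-extended (const_int -1)-style value rather than a
	     zero-extended 0xffffffff, or later passes may not treat the
	     RTL as canonical.  */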
7493 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 0)),
7495 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 1))));
7496 op1
= plus_constant (op1
, INTVAL (constant_part
));
7497 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
7498 op1
= force_operand (op1
, target
);
7502 else if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
7503 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_INT
7504 && TREE_CONSTANT (TREE_OPERAND (exp
, 0)))
7508 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
,
7509 (modifier
== EXPAND_INITIALIZER
7510 ? EXPAND_INITIALIZER
: EXPAND_SUM
));
7511 if (! CONSTANT_P (op0
))
7513 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
,
7514 VOIDmode
, modifier
);
7515 /* Don't go to both_summands if modifier
7516 says it's not right to return a PLUS. */
7517 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
7521 /* Use immed_double_const to ensure that the constant is
7522 truncated according to the mode of OP1, then sign extended
7523 to a HOST_WIDE_INT. Using the constant directly can result
7524 in non-canonical RTL in a 64x32 cross compile. */
7526 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1)),
7528 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))));
7529 op0
= plus_constant (op0
, INTVAL (constant_part
));
7530 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
7531 op0
= force_operand (op0
, target
);
      /* No sense saving up arithmetic to be done
         if it's all in the wrong mode to form part of an address.
         And force_operand won't know whether to sign-extend or
         zero-extend.  */
7540 if ((modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
7541 || mode
!= ptr_mode
)
7544 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1), 1))
7547 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, modifier
);
7548 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, modifier
);
7551 /* Make sure any term that's a sum with a constant comes last. */
7552 if (GET_CODE (op0
) == PLUS
7553 && CONSTANT_P (XEXP (op0
, 1)))
7559 /* If adding to a sum including a constant,
7560 associate it to put the constant outside. */
7561 if (GET_CODE (op1
) == PLUS
7562 && CONSTANT_P (XEXP (op1
, 1)))
7564 rtx constant_term
= const0_rtx
;
7566 temp
= simplify_binary_operation (PLUS
, mode
, XEXP (op1
, 0), op0
);
7569 /* Ensure that MULT comes first if there is one. */
7570 else if (GET_CODE (op0
) == MULT
)
7571 op0
= gen_rtx_PLUS (mode
, op0
, XEXP (op1
, 0));
7573 op0
= gen_rtx_PLUS (mode
, XEXP (op1
, 0), op0
);
7575 /* Let's also eliminate constants from op0 if possible. */
7576 op0
= eliminate_constant_term (op0
, &constant_term
);
7578 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7579 their sum should be a constant. Form it into OP1, since the
7580 result we want will then be OP0 + OP1. */
7582 temp
= simplify_binary_operation (PLUS
, mode
, constant_term
,
7587 op1
= gen_rtx_PLUS (mode
, constant_term
, XEXP (op1
, 1));
7590 /* Put a constant term last and put a multiplication first. */
7591 if (CONSTANT_P (op0
) || GET_CODE (op1
) == MULT
)
7592 temp
= op1
, op1
= op0
, op0
= temp
;
7594 temp
= simplify_binary_operation (PLUS
, mode
, op0
, op1
);
7595 return temp
? temp
: gen_rtx_PLUS (mode
, op0
, op1
);
7598 /* For initializers, we are allowed to return a MINUS of two
7599 symbolic constants. Here we handle all cases when both operands
7601 /* Handle difference of two symbolic constants,
7602 for the sake of an initializer. */
7603 if ((modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
7604 && really_constant_p (TREE_OPERAND (exp
, 0))
7605 && really_constant_p (TREE_OPERAND (exp
, 1)))
7607 rtx op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, VOIDmode
,
7609 rtx op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
,
7612 /* If the last operand is a CONST_INT, use plus_constant of
7613 the negated constant. Else make the MINUS. */
7614 if (GET_CODE (op1
) == CONST_INT
)
7615 return plus_constant (op0
, - INTVAL (op1
));
7617 return gen_rtx_MINUS (mode
, op0
, op1
);
7619 /* Convert A - const to A + (-const). */
7620 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
)
7622 tree negated
= fold (build1 (NEGATE_EXPR
, type
,
7623 TREE_OPERAND (exp
, 1)));
7625 if (TREE_UNSIGNED (type
) || TREE_OVERFLOW (negated
))
7626 /* If we can't negate the constant in TYPE, leave it alone and
7627 expand_binop will negate it for us. We used to try to do it
7628 here in the signed version of TYPE, but that doesn't work
7629 on POINTER_TYPEs. */;
7632 exp
= build (PLUS_EXPR
, type
, TREE_OPERAND (exp
, 0), negated
);
7636 this_optab
= ! unsignedp
&& flag_trapv
7637 && (GET_MODE_CLASS(mode
) == MODE_INT
)
7638 ? subv_optab
: sub_optab
;
7642 /* If first operand is constant, swap them.
7643 Thus the following special case checks need only
7644 check the second operand. */
7645 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == INTEGER_CST
)
7647 tree t1
= TREE_OPERAND (exp
, 0);
7648 TREE_OPERAND (exp
, 0) = TREE_OPERAND (exp
, 1);
7649 TREE_OPERAND (exp
, 1) = t1
;
7652 /* Attempt to return something suitable for generating an
7653 indexed address, for machines that support that. */
7655 if (modifier
== EXPAND_SUM
&& mode
== ptr_mode
7656 && host_integerp (TREE_OPERAND (exp
, 1), 0))
7658 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
,
7661 /* If we knew for certain that this is arithmetic for an array
7662 reference, and we knew the bounds of the array, then we could
7663 apply the distributive law across (PLUS X C) for constant C.
7664 Without such knowledge, we risk overflowing the computation
7665 when both X and C are large, but X+C isn't. */
7666 /* ??? Could perhaps special-case EXP being unsigned and C being
7667 positive. In that case we are certain that X+C is no smaller
7668 than X and so the transformed expression will overflow iff the
7669 original would have. */
7671 if (GET_CODE (op0
) != REG
)
7672 op0
= force_operand (op0
, NULL_RTX
);
7673 if (GET_CODE (op0
) != REG
)
7674 op0
= copy_to_mode_reg (mode
, op0
);
7677 gen_rtx_MULT (mode
, op0
,
7678 GEN_INT (tree_low_cst (TREE_OPERAND (exp
, 1), 0)));
7681 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1), 1))
7684 /* Check for multiplying things that have been extended
7685 from a narrower type. If this machine supports multiplying
7686 in that narrower type with a result in the desired type,
7687 do it that way, and avoid the explicit type-conversion. */
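	 For example, (int) (short) a * (int) (short) b can often be emitted
	 as a single widening multiply (a mulhisi3-style pattern) when the
	 target provides one, instead of two extensions plus a full-width
	 multiply.  */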
7688 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == NOP_EXPR
7689 && TREE_CODE (type
) == INTEGER_TYPE
7690 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
7691 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp
, 0))))
7692 && ((TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
7693 && int_fits_type_p (TREE_OPERAND (exp
, 1),
7694 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
7695 /* Don't use a widening multiply if a shift will do. */
7696 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 1))))
7697 > HOST_BITS_PER_WIDE_INT
)
7698 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1))) < 0))
7700 (TREE_CODE (TREE_OPERAND (exp
, 1)) == NOP_EXPR
7701 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0)))
7703 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))))
7704 /* If both operands are extended, they must either both
7705 be zero-extended or both be sign-extended. */
7706 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0)))
7708 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))))))
7710 enum machine_mode innermode
7711 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)));
7712 optab other_optab
= (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
7713 ? smul_widen_optab
: umul_widen_optab
);
7714 this_optab
= (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
7715 ? umul_widen_optab
: smul_widen_optab
);
7716 if (mode
== GET_MODE_WIDER_MODE (innermode
))
7718 if (this_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
)
7720 op0
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
7721 NULL_RTX
, VOIDmode
, 0);
7722 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
)
7723 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
,
7726 op1
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0),
7727 NULL_RTX
, VOIDmode
, 0);
7730 else if (other_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
7731 && innermode
== word_mode
)
7734 op0
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
7735 NULL_RTX
, VOIDmode
, 0);
7736 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
)
7737 op1
= convert_modes (innermode
, mode
,
7738 expand_expr (TREE_OPERAND (exp
, 1),
7739 NULL_RTX
, VOIDmode
, 0),
7742 op1
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0),
7743 NULL_RTX
, VOIDmode
, 0);
7744 temp
= expand_binop (mode
, other_optab
, op0
, op1
, target
,
7745 unsignedp
, OPTAB_LIB_WIDEN
);
7746 htem
= expand_mult_highpart_adjust (innermode
,
7747 gen_highpart (innermode
, temp
),
7749 gen_highpart (innermode
, temp
),
7751 emit_move_insn (gen_highpart (innermode
, temp
), htem
);
7756 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
7757 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
7758 return expand_mult (mode
, op0
, op1
, target
, unsignedp
);
7760 case TRUNC_DIV_EXPR
:
7761 case FLOOR_DIV_EXPR
:
7763 case ROUND_DIV_EXPR
:
7764 case EXACT_DIV_EXPR
:
7765 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1), 1))
7767 /* Possible optimization: compute the dividend with EXPAND_SUM
7768 then if the divisor is constant can optimize the case
7769 where some terms of the dividend have coeffs divisible by it. */
7770 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
7771 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
7772 return expand_divmod (0, code
, mode
, op0
, op1
, target
, unsignedp
);
      /* Emit a/b as a*(1/b).  Later we may manage to CSE the reciprocal,
         saving an expensive divide.  If not, combine will rebuild the
         original computation.  */
7778 if (flag_unsafe_math_optimizations
&& optimize
&& !optimize_size
7779 && TREE_CODE (type
) == REAL_TYPE
7780 && !real_onep (TREE_OPERAND (exp
, 0)))
7781 return expand_expr (build (MULT_EXPR
, type
, TREE_OPERAND (exp
, 0),
7782 build (RDIV_EXPR
, type
,
7783 build_real (type
, dconst1
),
7784 TREE_OPERAND (exp
, 1))),
7785 target
, tmode
, unsignedp
);
7786 this_optab
= sdiv_optab
;
7789 case TRUNC_MOD_EXPR
:
7790 case FLOOR_MOD_EXPR
:
7792 case ROUND_MOD_EXPR
:
7793 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1), 1))
7795 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
7796 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
7797 return expand_divmod (1, code
, mode
, op0
, op1
, target
, unsignedp
);
7799 case FIX_ROUND_EXPR
:
7800 case FIX_FLOOR_EXPR
:
7802 abort (); /* Not used for C. */
7804 case FIX_TRUNC_EXPR
:
7805 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, VOIDmode
, 0);
7807 target
= gen_reg_rtx (mode
);
7808 expand_fix (target
, op0
, unsignedp
);
7812 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, VOIDmode
, 0);
7814 target
= gen_reg_rtx (mode
);
7815 /* expand_float can't figure out what to do if FROM has VOIDmode.
7816 So give it the correct mode. With -O, cse will optimize this. */
7817 if (GET_MODE (op0
) == VOIDmode
)
7818 op0
= copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))),
7820 expand_float (target
, op0
,
7821 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
7825 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
7826 temp
= expand_unop (mode
,
7827 ! unsignedp
&& flag_trapv
7828 && (GET_MODE_CLASS(mode
) == MODE_INT
)
7829 ? negv_optab
: neg_optab
, op0
, target
, 0);
7835 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
7837 /* Handle complex values specially. */
7838 if (GET_MODE_CLASS (mode
) == MODE_COMPLEX_INT
7839 || GET_MODE_CLASS (mode
) == MODE_COMPLEX_FLOAT
)
7840 return expand_complex_abs (mode
, op0
, target
, unsignedp
);
7842 /* Unsigned abs is simply the operand. Testing here means we don't
7843 risk generating incorrect code below. */
7844 if (TREE_UNSIGNED (type
))
7847 return expand_abs (mode
, op0
, target
, unsignedp
,
7848 safe_from_p (target
, TREE_OPERAND (exp
, 0), 1));
7852 target
= original_target
;
7853 if (target
== 0 || ! safe_from_p (target
, TREE_OPERAND (exp
, 1), 1)
7854 || (GET_CODE (target
) == MEM
&& MEM_VOLATILE_P (target
))
7855 || GET_MODE (target
) != mode
7856 || (GET_CODE (target
) == REG
7857 && REGNO (target
) < FIRST_PSEUDO_REGISTER
))
7858 target
= gen_reg_rtx (mode
);
7859 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
7860 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, VOIDmode
, 0);
      /* First try to do it with a special MIN or MAX instruction.
         If that does not win, use a conditional jump to select the proper
         value.  */
7865 this_optab
= (TREE_UNSIGNED (type
)
7866 ? (code
== MIN_EXPR
? umin_optab
: umax_optab
)
7867 : (code
== MIN_EXPR
? smin_optab
: smax_optab
));
7869 temp
= expand_binop (mode
, this_optab
, op0
, op1
, target
, unsignedp
,
      /* At this point, a MEM target is no longer useful; we will get better
         code without it.  */
7877 if (GET_CODE (target
) == MEM
)
7878 target
= gen_reg_rtx (mode
);
7881 emit_move_insn (target
, op0
);
7883 op0
= gen_label_rtx ();
7885 /* If this mode is an integer too wide to compare properly,
7886 compare word by word. Rely on cse to optimize constant cases. */
7887 if (GET_MODE_CLASS (mode
) == MODE_INT
7888 && ! can_compare_p (GE
, mode
, ccp_jump
))
7890 if (code
== MAX_EXPR
)
7891 do_jump_by_parts_greater_rtx (mode
, TREE_UNSIGNED (type
),
7892 target
, op1
, NULL_RTX
, op0
);
7894 do_jump_by_parts_greater_rtx (mode
, TREE_UNSIGNED (type
),
7895 op1
, target
, NULL_RTX
, op0
);
7899 int unsignedp
= TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 1)));
7900 do_compare_rtx_and_jump (target
, op1
, code
== MAX_EXPR
? GE
: LE
,
7901 unsignedp
, mode
, NULL_RTX
, NULL_RTX
,
7904 emit_move_insn (target
, op1
);
7909 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
7910 temp
= expand_unop (mode
, one_cmpl_optab
, op0
, target
, 1);
7916 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
7917 temp
= expand_unop (mode
, ffs_optab
, op0
, target
, 1);
      /* ??? Can optimize bitwise operations with one arg constant.
         Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
         and (a bitwise1 b) bitwise2 b (etc)
         but that is probably not worth while.  */

      /* BIT_AND_EXPR is for bitwise anding.  TRUTH_AND_EXPR is for anding two
         boolean values when we want in all cases to compute both of them.  In
         general it is fastest to do TRUTH_AND_EXPR by computing both operands
         as actual zero-or-1 values and then bitwise anding.  In cases where
         there cannot be any side effects, better code would be made by
         treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
         how to recognize those cases.  */
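      /* (In C terms: TRUTH_AND_EXPR corresponds to evaluating both operands
         of "a && b" unconditionally and anding the 0/1 results, while
         TRUTH_ANDIF_EXPR is the usual short-circuit form.)  */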
7935 case TRUTH_AND_EXPR
:
7937 this_optab
= and_optab
;
7942 this_optab
= ior_optab
;
7945 case TRUTH_XOR_EXPR
:
7947 this_optab
= xor_optab
;
7954 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1), 1))
7956 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
7957 return expand_shift (code
, mode
, op0
, TREE_OPERAND (exp
, 1), target
,
      /* Could determine the answer when only additive constants differ.  Also,
         the addition of one can be handled by changing the condition.  */
    case UNORDERED_EXPR:
      temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
      /* For foo != 0, load foo, and if it is nonzero load 1 instead.  */
      if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
          && original_target
          && GET_CODE (original_target) == REG
          && (GET_MODE (original_target)
              == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        {
          temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
                              VOIDmode, 0);

          /* If temp is constant, we can just compute the result.  */
          if (GET_CODE (temp) == CONST_INT)
            {
              if (INTVAL (temp) != 0)
                emit_move_insn (target, const1_rtx);
              else
                emit_move_insn (target, const0_rtx);

              return target;
            }

          if (temp != original_target)
            {
              enum machine_mode mode1 = GET_MODE (temp);
              if (mode1 == VOIDmode)
                mode1 = tmode != VOIDmode ? tmode : mode;

              temp = copy_to_mode_reg (mode1, temp);
            }

          op1 = gen_label_rtx ();
          emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
                                   GET_MODE (temp), unsignedp, op1);
          emit_move_insn (temp, const1_rtx);
          emit_label (op1);
          return temp;
        }
      /* If no set-flag instruction, must generate a conditional
         store into a temporary variable.  Drop through
         and handle this like && and ||.  */

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      if (! ignore
          && (target == 0 || ! safe_from_p (target, exp, 1)
              /* Make sure we don't have a hard reg (such as function's return
                 value) live across basic blocks, if not optimizing.  */
              || (!optimize && GET_CODE (target) == REG
                  && REGNO (target) < FIRST_PSEUDO_REGISTER)))
        target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

      if (target)
        emit_clr_insn (target);

      op1 = gen_label_rtx ();
      jumpifnot (exp, op1);

      if (target)
        emit_0_to_1_insn (target);

      emit_label (op1);
      return ignore ? const0_rtx : target;
    case TRUTH_NOT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
      /* The parser is careful to generate TRUTH_NOT_EXPR
         only with operands that are always zero or one.  */
      temp = expand_binop (mode, xor_optab, op0, const1_rtx,
                           target, 1, OPTAB_LIB_WIDEN);
      if (temp == 0)
        abort ();
      return temp;

    case COMPOUND_EXPR:
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      emit_queue ();
      return expand_expr (TREE_OPERAND (exp, 1),
                          (ignore ? const0_rtx : target),
                          VOIDmode, modifier);
8061 /* If we would have a "singleton" (see below) were it not for a
8062 conversion in each arm, bring that conversion back out. */
8063 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == NOP_EXPR
8064 && TREE_CODE (TREE_OPERAND (exp
, 2)) == NOP_EXPR
8065 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0))
8066 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 2), 0))))
8068 tree iftrue
= TREE_OPERAND (TREE_OPERAND (exp
, 1), 0);
8069 tree iffalse
= TREE_OPERAND (TREE_OPERAND (exp
, 2), 0);
8071 if ((TREE_CODE_CLASS (TREE_CODE (iftrue
)) == '2'
8072 && operand_equal_p (iffalse
, TREE_OPERAND (iftrue
, 0), 0))
8073 || (TREE_CODE_CLASS (TREE_CODE (iffalse
)) == '2'
8074 && operand_equal_p (iftrue
, TREE_OPERAND (iffalse
, 0), 0))
8075 || (TREE_CODE_CLASS (TREE_CODE (iftrue
)) == '1'
8076 && operand_equal_p (iffalse
, TREE_OPERAND (iftrue
, 0), 0))
8077 || (TREE_CODE_CLASS (TREE_CODE (iffalse
)) == '1'
8078 && operand_equal_p (iftrue
, TREE_OPERAND (iffalse
, 0), 0)))
8079 return expand_expr (build1 (NOP_EXPR
, type
,
8080 build (COND_EXPR
, TREE_TYPE (iftrue
),
8081 TREE_OPERAND (exp
, 0),
8083 target
, tmode
, modifier
);
8087 /* Note that COND_EXPRs whose type is a structure or union
8088 are required to be constructed to contain assignments of
8089 a temporary variable, so that we can evaluate them here
8090 for side effect only. If type is void, we must do likewise. */
8092 /* If an arm of the branch requires a cleanup,
8093 only that cleanup is performed. */
8096 tree binary_op
= 0, unary_op
= 0;
8098 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8099 convert it to our mode, if necessary. */
8100 if (integer_onep (TREE_OPERAND (exp
, 1))
8101 && integer_zerop (TREE_OPERAND (exp
, 2))
8102 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<')
8106 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
,
8111 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, mode
, modifier
);
8112 if (GET_MODE (op0
) == mode
)
8116 target
= gen_reg_rtx (mode
);
8117 convert_move (target
, op0
, unsignedp
);
        /* Check for X ? A + B : A.  If we have this, we can copy A to the
           output and conditionally add B.  Similarly for unary operations.
           Don't do this if X has side-effects because those side effects
           might affect A or B and the "?" operation is a sequence point in
           ANSI.  (operand_equal_p tests for side effects.)  */
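        /* For instance, "x ? a + b : a" can be expanded by storing A into
           the target unconditionally and then adding B only on the branch
           where X is true, instead of computing both arms separately.  */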
        if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
            && operand_equal_p (TREE_OPERAND (exp, 2),
                                TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
          singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
        else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
                 && operand_equal_p (TREE_OPERAND (exp, 1),
                                     TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
          singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
        else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
                 && operand_equal_p (TREE_OPERAND (exp, 2),
                                     TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
          singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
        else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
                 && operand_equal_p (TREE_OPERAND (exp, 1),
                                     TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
          singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8144 /* If we are not to produce a result, we have no target. Otherwise,
8145 if a target was specified use it; it will not be used as an
8146 intermediate target unless it is safe. If no target, use a
8151 else if (original_target
8152 && (safe_from_p (original_target
, TREE_OPERAND (exp
, 0), 1)
8153 || (singleton
&& GET_CODE (original_target
) == REG
8154 && REGNO (original_target
) >= FIRST_PSEUDO_REGISTER
8155 && original_target
== var_rtx (singleton
)))
8156 && GET_MODE (original_target
) == mode
8157 #ifdef HAVE_conditional_move
8158 && (! can_conditionally_move_p (mode
)
8159 || GET_CODE (original_target
) == REG
8160 || TREE_ADDRESSABLE (type
))
8162 && (GET_CODE (original_target
) != MEM
8163 || TREE_ADDRESSABLE (type
)))
8164 temp
= original_target
;
8165 else if (TREE_ADDRESSABLE (type
))
8168 temp
= assign_temp (type
, 0, 0, 1);
        /* If we had X ? A + C : A, with C a constant power of 2, and we can
           do the test of X as a store-flag operation, do this as
           A + ((X != 0) << log C).  Similarly for other simple binary
           operators.  Only do for C == 1 if BRANCH_COST is low.  */
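        /* For instance, "x ? a + 4 : a" becomes "a + ((x != 0) << 2)",
           replacing the conditional branch with a store-flag and a shift.  */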
8174 if (temp
&& singleton
&& binary_op
8175 && (TREE_CODE (binary_op
) == PLUS_EXPR
8176 || TREE_CODE (binary_op
) == MINUS_EXPR
8177 || TREE_CODE (binary_op
) == BIT_IOR_EXPR
8178 || TREE_CODE (binary_op
) == BIT_XOR_EXPR
)
8179 && (BRANCH_COST
>= 3 ? integer_pow2p (TREE_OPERAND (binary_op
, 1))
8180 : integer_onep (TREE_OPERAND (binary_op
, 1)))
8181 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<')
8184 optab boptab
= (TREE_CODE (binary_op
) == PLUS_EXPR
8185 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op
))
8186 ? addv_optab
: add_optab
)
8187 : TREE_CODE (binary_op
) == MINUS_EXPR
8188 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op
))
8189 ? subv_optab
: sub_optab
)
8190 : TREE_CODE (binary_op
) == BIT_IOR_EXPR
? ior_optab
8193 /* If we had X ? A : A + 1, do this as A + (X == 0).
8195 We have to invert the truth value here and then put it
8196 back later if do_store_flag fails. We cannot simply copy
8197 TREE_OPERAND (exp, 0) to another variable and modify that
8198 because invert_truthvalue can modify the tree pointed to
8200 if (singleton
== TREE_OPERAND (exp
, 1))
8201 TREE_OPERAND (exp
, 0)
8202 = invert_truthvalue (TREE_OPERAND (exp
, 0));
8204 result
= do_store_flag (TREE_OPERAND (exp
, 0),
8205 (safe_from_p (temp
, singleton
, 1)
8207 mode
, BRANCH_COST
<= 1);
8209 if (result
!= 0 && ! integer_onep (TREE_OPERAND (binary_op
, 1)))
8210 result
= expand_shift (LSHIFT_EXPR
, mode
, result
,
8211 build_int_2 (tree_log2
8215 (safe_from_p (temp
, singleton
, 1)
8216 ? temp
: NULL_RTX
), 0);
8220 op1
= expand_expr (singleton
, NULL_RTX
, VOIDmode
, 0);
8221 return expand_binop (mode
, boptab
, op1
, result
, temp
,
8222 unsignedp
, OPTAB_LIB_WIDEN
);
8224 else if (singleton
== TREE_OPERAND (exp
, 1))
8225 TREE_OPERAND (exp
, 0)
8226 = invert_truthvalue (TREE_OPERAND (exp
, 0));
8229 do_pending_stack_adjust ();
8231 op0
= gen_label_rtx ();
8233 if (singleton
&& ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0)))
8237 /* If the target conflicts with the other operand of the
8238 binary op, we can't use it. Also, we can't use the target
8239 if it is a hard register, because evaluating the condition
8240 might clobber it. */
8242 && ! safe_from_p (temp
, TREE_OPERAND (binary_op
, 1), 1))
8243 || (GET_CODE (temp
) == REG
8244 && REGNO (temp
) < FIRST_PSEUDO_REGISTER
))
8245 temp
= gen_reg_rtx (mode
);
8246 store_expr (singleton
, temp
, 0);
8249 expand_expr (singleton
,
8250 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8251 if (singleton
== TREE_OPERAND (exp
, 1))
8252 jumpif (TREE_OPERAND (exp
, 0), op0
);
8254 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
8256 start_cleanup_deferral ();
8257 if (binary_op
&& temp
== 0)
8258 /* Just touch the other operand. */
8259 expand_expr (TREE_OPERAND (binary_op
, 1),
8260 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8262 store_expr (build (TREE_CODE (binary_op
), type
,
8263 make_tree (type
, temp
),
8264 TREE_OPERAND (binary_op
, 1)),
8267 store_expr (build1 (TREE_CODE (unary_op
), type
,
8268 make_tree (type
, temp
)),
8272 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8273 comparison operator. If we have one of these cases, set the
8274 output to A, branch on A (cse will merge these two references),
8275 then set the output to FOO. */
8277 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<'
8278 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp
, 0), 1))
8279 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
8280 TREE_OPERAND (exp
, 1), 0)
8281 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0))
8282 || TREE_CODE (TREE_OPERAND (exp
, 1)) == SAVE_EXPR
)
8283 && safe_from_p (temp
, TREE_OPERAND (exp
, 2), 1))
8285 if (GET_CODE (temp
) == REG
8286 && REGNO (temp
) < FIRST_PSEUDO_REGISTER
)
8287 temp
= gen_reg_rtx (mode
);
8288 store_expr (TREE_OPERAND (exp
, 1), temp
, 0);
8289 jumpif (TREE_OPERAND (exp
, 0), op0
);
8291 start_cleanup_deferral ();
8292 store_expr (TREE_OPERAND (exp
, 2), temp
, 0);
8296 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<'
8297 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp
, 0), 1))
8298 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
8299 TREE_OPERAND (exp
, 2), 0)
8300 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0))
8301 || TREE_CODE (TREE_OPERAND (exp
, 2)) == SAVE_EXPR
)
8302 && safe_from_p (temp
, TREE_OPERAND (exp
, 1), 1))
8304 if (GET_CODE (temp
) == REG
8305 && REGNO (temp
) < FIRST_PSEUDO_REGISTER
)
8306 temp
= gen_reg_rtx (mode
);
8307 store_expr (TREE_OPERAND (exp
, 2), temp
, 0);
8308 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
8310 start_cleanup_deferral ();
8311 store_expr (TREE_OPERAND (exp
, 1), temp
, 0);
8316 op1
= gen_label_rtx ();
8317 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
8319 start_cleanup_deferral ();
8321 /* One branch of the cond can be void, if it never returns. For
8322 example A ? throw : E */
8324 && TREE_TYPE (TREE_OPERAND (exp
, 1)) != void_type_node
)
8325 store_expr (TREE_OPERAND (exp
, 1), temp
, 0);
8327 expand_expr (TREE_OPERAND (exp
, 1),
8328 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8329 end_cleanup_deferral ();
8331 emit_jump_insn (gen_jump (op1
));
8334 start_cleanup_deferral ();
8336 && TREE_TYPE (TREE_OPERAND (exp
, 2)) != void_type_node
)
8337 store_expr (TREE_OPERAND (exp
, 2), temp
, 0);
8339 expand_expr (TREE_OPERAND (exp
, 2),
8340 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8343 end_cleanup_deferral ();
8354 /* Something needs to be initialized, but we didn't know
8355 where that thing was when building the tree. For example,
8356 it could be the return value of a function, or a parameter
8357 to a function which lays down in the stack, or a temporary
8358 variable which must be passed by reference.
8360 We guarantee that the expression will either be constructed
8361 or copied into our original target. */
8363 tree slot
= TREE_OPERAND (exp
, 0);
8364 tree cleanups
= NULL_TREE
;
8367 if (TREE_CODE (slot
) != VAR_DECL
)
8371 target
= original_target
;
8373 /* Set this here so that if we get a target that refers to a
8374 register variable that's already been used, put_reg_into_stack
8375 knows that it should fix up those uses. */
8376 TREE_USED (slot
) = 1;
8380 if (DECL_RTL_SET_P (slot
))
8382 target
= DECL_RTL (slot
);
8383 /* If we have already expanded the slot, so don't do
8385 if (TREE_OPERAND (exp
, 1) == NULL_TREE
)
8390 target
= assign_temp (type
, 2, 0, 1);
8391 /* All temp slots at this level must not conflict. */
8392 preserve_temp_slots (target
);
8393 SET_DECL_RTL (slot
, target
);
8394 if (TREE_ADDRESSABLE (slot
))
8395 put_var_into_stack (slot
);
8397 /* Since SLOT is not known to the called function
8398 to belong to its stack frame, we must build an explicit
8399 cleanup. This case occurs when we must build up a reference
8400 to pass the reference as an argument. In this case,
8401 it is very likely that such a reference need not be
8404 if (TREE_OPERAND (exp
, 2) == 0)
8405 TREE_OPERAND (exp
, 2)
8406 = (*lang_hooks
.maybe_build_cleanup
) (slot
);
8407 cleanups
= TREE_OPERAND (exp
, 2);
8412 /* This case does occur, when expanding a parameter which
8413 needs to be constructed on the stack. The target
8414 is the actual stack address that we want to initialize.
8415 The function we call will perform the cleanup in this case. */
8417 /* If we have already assigned it space, use that space,
8418 not target that we were passed in, as our target
8419 parameter is only a hint. */
8420 if (DECL_RTL_SET_P (slot
))
8422 target
= DECL_RTL (slot
);
8423 /* If we have already expanded the slot, so don't do
8425 if (TREE_OPERAND (exp
, 1) == NULL_TREE
)
8430 SET_DECL_RTL (slot
, target
);
8431 /* If we must have an addressable slot, then make sure that
8432 the RTL that we just stored in slot is OK. */
8433 if (TREE_ADDRESSABLE (slot
))
8434 put_var_into_stack (slot
);
8438 exp1
= TREE_OPERAND (exp
, 3) = TREE_OPERAND (exp
, 1);
8439 /* Mark it as expanded. */
8440 TREE_OPERAND (exp
, 1) = NULL_TREE
;
8442 store_expr (exp1
, target
, 0);
8444 expand_decl_cleanup_eh (NULL_TREE
, cleanups
, CLEANUP_EH_ONLY (exp
));
8451 tree lhs
= TREE_OPERAND (exp
, 0);
8452 tree rhs
= TREE_OPERAND (exp
, 1);
8454 temp
= expand_assignment (lhs
, rhs
, ! ignore
, original_target
!= 0);
8460 /* If lhs is complex, expand calls in rhs before computing it.
8461 That's so we don't compute a pointer and save it over a
8462 call. If lhs is simple, compute it first so we can give it
8463 as a target if the rhs is just a call. This avoids an
8464 extra temp and copy and that prevents a partial-subsumption
8465 which makes bad code. Actually we could treat
8466 component_ref's of vars like vars. */
8468 tree lhs
= TREE_OPERAND (exp
, 0);
8469 tree rhs
= TREE_OPERAND (exp
, 1);
8473 /* Check for |= or &= of a bitfield of size one into another bitfield
8474 of size 1. In this case, (unless we need the result of the
8475 assignment) we can do this more efficiently with a
8476 test followed by an assignment, if necessary.
8478 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8479 things change so we do, this code should be enhanced to
8482 && TREE_CODE (lhs
) == COMPONENT_REF
8483 && (TREE_CODE (rhs
) == BIT_IOR_EXPR
8484 || TREE_CODE (rhs
) == BIT_AND_EXPR
)
8485 && TREE_OPERAND (rhs
, 0) == lhs
8486 && TREE_CODE (TREE_OPERAND (rhs
, 1)) == COMPONENT_REF
8487 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs
, 1)))
8488 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs
, 1), 1))))
8490 rtx label
= gen_label_rtx ();
8492 do_jump (TREE_OPERAND (rhs
, 1),
8493 TREE_CODE (rhs
) == BIT_IOR_EXPR
? label
: 0,
8494 TREE_CODE (rhs
) == BIT_AND_EXPR
? label
: 0);
8495 expand_assignment (lhs
, convert (TREE_TYPE (rhs
),
8496 (TREE_CODE (rhs
) == BIT_IOR_EXPR
8498 : integer_zero_node
)),
8500 do_pending_stack_adjust ();
8505 temp
= expand_assignment (lhs
, rhs
, ! ignore
, original_target
!= 0);
8511 if (!TREE_OPERAND (exp
, 0))
8512 expand_null_return ();
8514 expand_return (TREE_OPERAND (exp
, 0));
8517 case PREINCREMENT_EXPR
:
8518 case PREDECREMENT_EXPR
:
8519 return expand_increment (exp
, 0, ignore
);
8521 case POSTINCREMENT_EXPR
:
8522 case POSTDECREMENT_EXPR
:
8523 /* Faster to treat as pre-increment if result is not used. */
8524 return expand_increment (exp
, ! ignore
, ignore
);
8527 /* Are we taking the address of a nested function? */
8528 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == FUNCTION_DECL
8529 && decl_function_context (TREE_OPERAND (exp
, 0)) != 0
8530 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp
, 0))
8531 && ! TREE_STATIC (exp
))
8533 op0
= trampoline_address (TREE_OPERAND (exp
, 0));
8534 op0
= force_operand (op0
, target
);
8536 /* If we are taking the address of something erroneous, just
8538 else if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ERROR_MARK
)
8540 /* If we are taking the address of a constant and are at the
8541 top level, we have to use output_constant_def since we can't
8542 call force_const_mem at top level. */
8544 && (TREE_CODE (TREE_OPERAND (exp
, 0)) == CONSTRUCTOR
8545 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0)))
8547 op0
= XEXP (output_constant_def (TREE_OPERAND (exp
, 0), 0), 0);
8550 /* We make sure to pass const0_rtx down if we came in with
8551 ignore set, to avoid doing the cleanups twice for something. */
8552 op0
= expand_expr (TREE_OPERAND (exp
, 0),
8553 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
,
8554 (modifier
== EXPAND_INITIALIZER
8555 ? modifier
: EXPAND_CONST_ADDRESS
));
8557 /* If we are going to ignore the result, OP0 will have been set
8558 to const0_rtx, so just return it. Don't get confused and
8559 think we are taking the address of the constant. */
8563 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8564 clever and returns a REG when given a MEM. */
8565 op0
= protect_from_queue (op0
, 1);
8567 /* We would like the object in memory. If it is a constant, we can
8568 have it be statically allocated into memory. For a non-constant,
8569 we need to allocate some memory and store the value into it. */
8571 if (CONSTANT_P (op0
))
8572 op0
= force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))),
8574 else if (GET_CODE (op0
) == REG
|| GET_CODE (op0
) == SUBREG
8575 || GET_CODE (op0
) == CONCAT
|| GET_CODE (op0
) == ADDRESSOF
8576 || GET_CODE (op0
) == PARALLEL
)
8578 /* If the operand is a SAVE_EXPR, we can deal with this by
8579 forcing the SAVE_EXPR into memory. */
8580 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == SAVE_EXPR
)
8582 put_var_into_stack (TREE_OPERAND (exp
, 0));
8583 op0
= SAVE_EXPR_RTL (TREE_OPERAND (exp
, 0));
8587 /* If this object is in a register, it can't be BLKmode. */
8588 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
8589 rtx memloc
= assign_temp (inner_type
, 1, 1, 1);
8591 if (GET_CODE (op0
) == PARALLEL
)
8592 /* Handle calls that pass values in multiple
8593 non-contiguous locations. The Irix 6 ABI has examples
8595 emit_group_store (memloc
, op0
,
8596 int_size_in_bytes (inner_type
));
8598 emit_move_insn (memloc
, op0
);
8604 if (GET_CODE (op0
) != MEM
)
8607 mark_temp_addr_taken (op0
);
8608 if (modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
8610 op0
= XEXP (op0
, 0);
8611 #ifdef POINTERS_EXTEND_UNSIGNED
8612 if (GET_MODE (op0
) == Pmode
&& GET_MODE (op0
) != mode
8613 && mode
== ptr_mode
)
8614 op0
= convert_memory_address (ptr_mode
, op0
);
8619 /* If OP0 is not aligned as least as much as the type requires, we
8620 need to make a temporary, copy OP0 to it, and take the address of
8621 the temporary. We want to use the alignment of the type, not of
8622 the operand. Note that this is incorrect for FUNCTION_TYPE, but
8623 the test for BLKmode means that can't happen. The test for
8624 BLKmode is because we never make mis-aligned MEMs with
8627 We don't need to do this at all if the machine doesn't have
8628 strict alignment. */
8629 if (STRICT_ALIGNMENT
&& GET_MODE (op0
) == BLKmode
8630 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp
, 0)))
8632 && MEM_ALIGN (op0
) < BIGGEST_ALIGNMENT
)
8634 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
8636 = assign_stack_temp_for_type
8637 (TYPE_MODE (inner_type
),
8638 MEM_SIZE (op0
) ? INTVAL (MEM_SIZE (op0
))
8639 : int_size_in_bytes (inner_type
),
8640 1, build_qualified_type (inner_type
,
8641 (TYPE_QUALS (inner_type
)
8642 | TYPE_QUAL_CONST
)));
8644 if (TYPE_ALIGN_OK (inner_type
))
8647 emit_block_move (new, op0
, expr_size (TREE_OPERAND (exp
, 0)));
8651 op0
= force_operand (XEXP (op0
, 0), target
);
8655 && GET_CODE (op0
) != REG
8656 && modifier
!= EXPAND_CONST_ADDRESS
8657 && modifier
!= EXPAND_INITIALIZER
8658 && modifier
!= EXPAND_SUM
)
8659 op0
= force_reg (Pmode
, op0
);
8661 if (GET_CODE (op0
) == REG
8662 && ! REG_USERVAR_P (op0
))
8663 mark_reg_pointer (op0
, TYPE_ALIGN (TREE_TYPE (type
)));
8665 #ifdef POINTERS_EXTEND_UNSIGNED
8666 if (GET_MODE (op0
) == Pmode
&& GET_MODE (op0
) != mode
8667 && mode
== ptr_mode
)
8668 op0
= convert_memory_address (ptr_mode
, op0
);
8673 case ENTRY_VALUE_EXPR
:
8676 /* COMPLEX type for Extended Pascal & Fortran */
8679 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (TREE_TYPE (exp
)));
8682 /* Get the rtx code of the operands. */
8683 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
8684 op1
= expand_expr (TREE_OPERAND (exp
, 1), 0, VOIDmode
, 0);
8687 target
= gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp
)));
8691 /* Move the real (op0) and imaginary (op1) parts to their location. */
8692 emit_move_insn (gen_realpart (mode
, target
), op0
);
8693 emit_move_insn (gen_imagpart (mode
, target
), op1
);
8695 insns
= get_insns ();
8698 /* Complex construction should appear as a single unit. */
8699 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8700 each with a separate pseudo as destination.
8701 It's not correct for flow to treat them as a unit. */
8702 if (GET_CODE (target
) != CONCAT
)
8703 emit_no_conflict_block (insns
, target
, op0
, op1
, NULL_RTX
);
8711 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
8712 return gen_realpart (mode
, op0
);
8715 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
8716 return gen_imagpart (mode
, op0
);
8720 enum machine_mode partmode
= TYPE_MODE (TREE_TYPE (TREE_TYPE (exp
)));
8724 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
8727 target
= gen_reg_rtx (mode
);
8731 /* Store the realpart and the negated imagpart to target. */
8732 emit_move_insn (gen_realpart (partmode
, target
),
8733 gen_realpart (partmode
, op0
));
8735 imag_t
= gen_imagpart (partmode
, target
);
8736 temp
= expand_unop (partmode
,
8737 ! unsignedp
&& flag_trapv
8738 && (GET_MODE_CLASS(partmode
) == MODE_INT
)
8739 ? negv_optab
: neg_optab
,
8740 gen_imagpart (partmode
, op0
), imag_t
, 0);
8742 emit_move_insn (imag_t
, temp
);
8744 insns
= get_insns ();
8747 /* Conjugate should appear as a single unit
8748 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8749 each with a separate pseudo as destination.
8750 It's not correct for flow to treat them as a unit. */
8751 if (GET_CODE (target
) != CONCAT
)
8752 emit_no_conflict_block (insns
, target
, op0
, NULL_RTX
, NULL_RTX
);
8759 case TRY_CATCH_EXPR
:
8761 tree handler
= TREE_OPERAND (exp
, 1);
8763 expand_eh_region_start ();
8765 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
8767 expand_eh_region_end_cleanup (handler
);
8772 case TRY_FINALLY_EXPR
:
8774 tree try_block
= TREE_OPERAND (exp
, 0);
8775 tree finally_block
= TREE_OPERAND (exp
, 1);
8776 rtx finally_label
= gen_label_rtx ();
8777 rtx done_label
= gen_label_rtx ();
8778 rtx return_link
= gen_reg_rtx (Pmode
);
8779 tree cleanup
= build (GOTO_SUBROUTINE_EXPR
, void_type_node
,
8780 (tree
) finally_label
, (tree
) return_link
);
8781 TREE_SIDE_EFFECTS (cleanup
) = 1;
8783 /* Start a new binding layer that will keep track of all cleanup
8784 actions to be performed. */
8785 expand_start_bindings (2);
8787 target_temp_slot_level
= temp_slot_level
;
8789 expand_decl_cleanup (NULL_TREE
, cleanup
);
8790 op0
= expand_expr (try_block
, target
, tmode
, modifier
);
8792 preserve_temp_slots (op0
);
8793 expand_end_bindings (NULL_TREE
, 0, 0);
8794 emit_jump (done_label
);
8795 emit_label (finally_label
);
8796 expand_expr (finally_block
, const0_rtx
, VOIDmode
, 0);
8797 emit_indirect_jump (return_link
);
8798 emit_label (done_label
);
8802 case GOTO_SUBROUTINE_EXPR
:
8804 rtx subr
= (rtx
) TREE_OPERAND (exp
, 0);
8805 rtx return_link
= *(rtx
*) &TREE_OPERAND (exp
, 1);
8806 rtx return_address
= gen_label_rtx ();
8807 emit_move_insn (return_link
,
8808 gen_rtx_LABEL_REF (Pmode
, return_address
));
8810 emit_label (return_address
);
8815 return expand_builtin_va_arg (TREE_OPERAND (exp
, 0), type
);
8818 return get_exception_pointer (cfun
);
8821 /* Function descriptors are not valid except for as
8822 initialization constants, and should not be expanded. */
8826 return (*lang_hooks
.expand_expr
) (exp
, original_target
, tmode
, modifier
);
8829 /* Here to do an ordinary binary operator, generating an instruction
8830 from the optab already placed in `this_optab'. */
8832 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1), 1))
8834 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
8835 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
8837 temp
= expand_binop (mode
, this_optab
, op0
, op1
, target
,
8838 unsignedp
, OPTAB_LIB_WIDEN
);
/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
   when applied to the address of EXP produces an address known to be
   aligned more than BIGGEST_ALIGNMENT.  */
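/* Such an offset typically has the form (- (some cast of) &EXP) & (ALIGN - 1),
   i.e. the amount needed to round the address of EXP up to an ALIGN-byte
   boundary; the checks below look for exactly that shape of tree.  */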
static int
is_aligning_offset (offset, exp)
     tree offset;
     tree exp;
{
  /* Strip off any conversions and WITH_RECORD_EXPR nodes.  */
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
         || TREE_CODE (offset) == NOP_EXPR
         || TREE_CODE (offset) == CONVERT_EXPR
         || TREE_CODE (offset) == WITH_RECORD_EXPR)
    offset = TREE_OPERAND (offset, 0);

  /* We must now have a BIT_AND_EXPR with a constant that is one less than
     a power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
  if (TREE_CODE (offset) != BIT_AND_EXPR
      || !host_integerp (TREE_OPERAND (offset, 1), 1)
      || compare_tree_int (TREE_OPERAND (offset, 1), BIGGEST_ALIGNMENT) <= 0
      || !exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
    return 0;

  /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
     It must be NEGATE_EXPR.  Then strip any more conversions.  */
  offset = TREE_OPERAND (offset, 0);
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
         || TREE_CODE (offset) == NOP_EXPR
         || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  if (TREE_CODE (offset) != NEGATE_EXPR)
    return 0;

  offset = TREE_OPERAND (offset, 0);
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
         || TREE_CODE (offset) == NOP_EXPR
         || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
     whose type is the same as EXP.  */
  return (TREE_CODE (offset) == ADDR_EXPR
          && (TREE_OPERAND (offset, 0) == exp
              || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
                  && (TREE_TYPE (TREE_OPERAND (offset, 0))
                      == TREE_TYPE (exp)))));
}
/* Return the tree node if ARG corresponds to a string constant or zero
   if it doesn't.  If we return non-zero, set *PTR_OFFSET to the offset
   in bytes within the string that ARG is accessing.  The type of the
   offset will be `sizetype'.  */
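/* For instance, for an ARG that is the address of a STRING_CST plus the
   constant 3 (roughly &"hello"[3]), the STRING_CST node is returned and
   *PTR_OFFSET is set to a sizetype constant 3.  */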
tree
string_constant (arg, ptr_offset)
     tree arg;
     tree *ptr_offset;
{
  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
    {
      *ptr_offset = size_zero_node;
      return TREE_OPERAND (arg, 0);
    }
  else if (TREE_CODE (arg) == PLUS_EXPR)
    {
      tree arg0 = TREE_OPERAND (arg, 0);
      tree arg1 = TREE_OPERAND (arg, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
        {
          *ptr_offset = convert (sizetype, arg1);
          return TREE_OPERAND (arg0, 0);
        }
      else if (TREE_CODE (arg1) == ADDR_EXPR
               && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
        {
          *ptr_offset = convert (sizetype, arg0);
          return TREE_OPERAND (arg1, 0);
        }
    }

  return 0;
}
/* Expand code for a post- or pre- increment or decrement
   and return the RTX for the result.
   POST is 1 for postinc/decrements and 0 for preinc/decrements.  */
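/* For a postincrement such as "i++" the value returned is the value of I
   before the increment; for a preincrement such as "++i" it is the value
   after the increment.  */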
8942 expand_increment (exp
, post
, ignore
)
8948 tree incremented
= TREE_OPERAND (exp
, 0);
8949 optab this_optab
= add_optab
;
8951 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (exp
));
8952 int op0_is_copy
= 0;
8953 int single_insn
= 0;
8954 /* 1 means we can't store into OP0 directly,
8955 because it is a subreg narrower than a word,
8956 and we don't dare clobber the rest of the word. */
8959 /* Stabilize any component ref that might need to be
8960 evaluated more than once below. */
8962 || TREE_CODE (incremented
) == BIT_FIELD_REF
8963 || (TREE_CODE (incremented
) == COMPONENT_REF
8964 && (TREE_CODE (TREE_OPERAND (incremented
, 0)) != INDIRECT_REF
8965 || DECL_BIT_FIELD (TREE_OPERAND (incremented
, 1)))))
8966 incremented
= stabilize_reference (incremented
);
8967 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
8968 ones into save exprs so that they don't accidentally get evaluated
8969 more than once by the code below. */
8970 if (TREE_CODE (incremented
) == PREINCREMENT_EXPR
8971 || TREE_CODE (incremented
) == PREDECREMENT_EXPR
)
8972 incremented
= save_expr (incremented
);
8974 /* Compute the operands as RTX.
8975 Note whether OP0 is the actual lvalue or a copy of it:
8976 I believe it is a copy iff it is a register or subreg
8977 and insns were generated in computing it. */
8979 temp
= get_last_insn ();
8980 op0
= expand_expr (incremented
, NULL_RTX
, VOIDmode
, 0);
8982 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
8983 in place but instead must do sign- or zero-extension during assignment,
8984 so we copy it into a new register and let the code below use it as
8987 Note that we can safely modify this SUBREG since it is know not to be
8988 shared (it was made by the expand_expr call above). */
8990 if (GET_CODE (op0
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (op0
))
8993 SUBREG_REG (op0
) = copy_to_reg (SUBREG_REG (op0
));
8997 else if (GET_CODE (op0
) == SUBREG
8998 && GET_MODE_BITSIZE (GET_MODE (op0
)) < BITS_PER_WORD
)
9000 /* We cannot increment this SUBREG in place. If we are
9001 post-incrementing, get a copy of the old value. Otherwise,
9002 just mark that we cannot increment in place. */
9004 op0
= copy_to_reg (op0
);
9009 op0_is_copy
= ((GET_CODE (op0
) == SUBREG
|| GET_CODE (op0
) == REG
)
9010 && temp
!= get_last_insn ());
9011 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
9013 /* Decide whether incrementing or decrementing. */
9014 if (TREE_CODE (exp
) == POSTDECREMENT_EXPR
9015 || TREE_CODE (exp
) == PREDECREMENT_EXPR
)
9016 this_optab
= sub_optab
;
9018 /* Convert decrement by a constant into a negative increment. */
9019 if (this_optab
== sub_optab
9020 && GET_CODE (op1
) == CONST_INT
)
9022 op1
= GEN_INT (-INTVAL (op1
));
9023 this_optab
= add_optab
;
9026 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp
)))
9027 this_optab
= this_optab
== add_optab
? addv_optab
: subv_optab
;
9029 /* For a preincrement, see if we can do this with a single instruction. */
9032 icode
= (int) this_optab
->handlers
[(int) mode
].insn_code
;
9033 if (icode
!= (int) CODE_FOR_nothing
9034 /* Make sure that OP0 is valid for operands 0 and 1
9035 of the insn we want to queue. */
9036 && (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode
)
9037 && (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode
)
9038 && (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode
))
9042 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9043 then we cannot just increment OP0. We must therefore contrive to
9044 increment the original value. Then, for postincrement, we can return
9045 OP0 since it is a copy of the old value. For preincrement, expand here
9046 unless we can do it with a single insn.
9048 Likewise if storing directly into OP0 would clobber high bits
9049 we need to preserve (bad_subreg). */
9050 if (op0_is_copy
|| (!post
&& !single_insn
) || bad_subreg
)
9052 /* This is the easiest way to increment the value wherever it is.
9053 Problems with multiple evaluation of INCREMENTED are prevented
9054 because either (1) it is a component_ref or preincrement,
9055 in which case it was stabilized above, or (2) it is an array_ref
9056 with constant index in an array in a register, which is
9057 safe to reevaluate. */
9058 tree newexp
= build (((TREE_CODE (exp
) == POSTDECREMENT_EXPR
9059 || TREE_CODE (exp
) == PREDECREMENT_EXPR
)
9060 ? MINUS_EXPR
: PLUS_EXPR
),
9063 TREE_OPERAND (exp
, 1));
9065 while (TREE_CODE (incremented
) == NOP_EXPR
9066 || TREE_CODE (incremented
) == CONVERT_EXPR
)
9068 newexp
= convert (TREE_TYPE (incremented
), newexp
);
9069 incremented
= TREE_OPERAND (incremented
, 0);
9072 temp
= expand_assignment (incremented
, newexp
, ! post
&& ! ignore
, 0);
9073 return post
? op0
: temp
;
  /* We have a true reference to the value in OP0.
     If there is an insn to add or subtract in this mode, queue it.
     Queueing the increment insn avoids the register shuffling
     that often results if we must increment now and first save
     the old value for subsequent use.  */
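  /* For instance, for a postincrement "i++" whose old value is still needed,
     queueing the add lets later code use the old value of I directly instead
     of first copying it into a temporary.  */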
9084 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9085 op0
= stabilize (op0
);
9088 icode
= (int) this_optab
->handlers
[(int) mode
].insn_code
;
9089 if (icode
!= (int) CODE_FOR_nothing
9090 /* Make sure that OP0 is valid for operands 0 and 1
9091 of the insn we want to queue. */
9092 && (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode
)
9093 && (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode
))
9095 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode
))
9096 op1
= force_reg (mode
, op1
);
9098 return enqueue_insn (op0
, GEN_FCN (icode
) (op0
, op0
, op1
));
9100 if (icode
!= (int) CODE_FOR_nothing
&& GET_CODE (op0
) == MEM
)
9102 rtx addr
= (general_operand (XEXP (op0
, 0), mode
)
9103 ? force_reg (Pmode
, XEXP (op0
, 0))
9104 : copy_to_reg (XEXP (op0
, 0)));
9107 op0
= replace_equiv_address (op0
, addr
);
9108 temp
= force_reg (GET_MODE (op0
), op0
);
9109 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode
))
9110 op1
= force_reg (mode
, op1
);
9112 /* The increment queue is LIFO, thus we have to `queue'
9113 the instructions in reverse order. */
9114 enqueue_insn (op0
, gen_move_insn (op0
, temp
));
9115 result
= enqueue_insn (temp
, GEN_FCN (icode
) (temp
, temp
, op1
));
9120 /* Preincrement, or we can't increment with one simple insn. */
9122 /* Save a copy of the value before inc or dec, to return it later. */
9123 temp
= value
= copy_to_reg (op0
);
9125 /* Arrange to return the incremented value. */
9126 /* Copy the rtx because expand_binop will protect from the queue,
9127 and the results of that would be invalid for us to return
9128 if our caller does emit_queue before using our result. */
9129 temp
= copy_rtx (value
= op0
);
9131 /* Increment however we can. */
9132 op1
= expand_binop (mode
, this_optab
, value
, op1
, op0
,
9133 TREE_UNSIGNED (TREE_TYPE (exp
)), OPTAB_LIB_WIDEN
);
9135 /* Make sure the value is stored into OP0. */
9137 emit_move_insn (op0
, op1
);
/* At the start of a function, record that we have no previously-pushed
   arguments waiting to be popped.  */

void
init_pending_stack_adjust ()
{
  pending_stack_adjust = 0;
}

/* When exiting from function, if safe, clear out any pending stack adjust
   so the adjustment won't get done.

   Note, if the current function calls alloca, then it must have a
   frame pointer regardless of the value of flag_omit_frame_pointer.  */

void
clear_pending_stack_adjust ()
{
#ifdef EXIT_IGNORE_STACK
  if (optimize > 0
      && (! flag_omit_frame_pointer || current_function_calls_alloca)
      && EXIT_IGNORE_STACK
      && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
      && ! flag_inline_functions)
    {
      stack_pointer_delta -= pending_stack_adjust,
      pending_stack_adjust = 0;
    }
#endif
}

/* Pop any previously-pushed arguments that have not been popped yet.  */

void
do_pending_stack_adjust ()
{
  if (inhibit_defer_pop == 0)
    {
      if (pending_stack_adjust != 0)
        adjust_stack (GEN_INT (pending_stack_adjust));
      pending_stack_adjust = 0;
    }
}
/* Expand conditional expressions.  */

/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
   LABEL is an rtx of code CODE_LABEL, in this function and all the
   functions here.  */

void
jumpifnot (exp, label)
     tree exp;
     rtx label;
{
  do_jump (exp, label, NULL_RTX);
}

/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero.  */

void
jumpif (exp, label)
     tree exp;
     rtx label;
{
  do_jump (exp, NULL_RTX, label);
}
/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
   the result is zero, or IF_TRUE_LABEL if the result is one.
   Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
   meaning fall through in that case.

   do_jump always does any pending stack adjust except when it does not
   actually perform a jump.  An example where there is no jump
   is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.

   This function is responsible for optimizing cases such as
   &&, || and comparison operators in EXP.  */
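/* For instance, for "a && b" this emits a test of A that jumps straight to
   IF_FALSE_LABEL when A is zero, so B is only evaluated when A is nonzero;
   no boolean value is ever materialized in a register.  */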
9223 do_jump (exp
, if_false_label
, if_true_label
)
9225 rtx if_false_label
, if_true_label
;
9227 enum tree_code code
= TREE_CODE (exp
);
9228 /* Some cases need to create a label to jump to
9229 in order to properly fall through.
9230 These cases set DROP_THROUGH_LABEL nonzero. */
9231 rtx drop_through_label
= 0;
9235 enum machine_mode mode
;
9237 #ifdef MAX_INTEGER_COMPUTATION_MODE
9238 check_max_integer_computation_mode (exp
);
9249 temp
= integer_zerop (exp
) ? if_false_label
: if_true_label
;
9255 /* This is not true with #pragma weak */
9257 /* The address of something can never be zero. */
9259 emit_jump (if_true_label
);
9264 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == COMPONENT_REF
9265 || TREE_CODE (TREE_OPERAND (exp
, 0)) == BIT_FIELD_REF
9266 || TREE_CODE (TREE_OPERAND (exp
, 0)) == ARRAY_REF
9267 || TREE_CODE (TREE_OPERAND (exp
, 0)) == ARRAY_RANGE_REF
)
9270 /* If we are narrowing the operand, we have to do the compare in the
9272 if ((TYPE_PRECISION (TREE_TYPE (exp
))
9273 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
9275 case NON_LVALUE_EXPR
:
9276 case REFERENCE_EXPR
:
9281 /* These cannot change zero->non-zero or vice versa. */
9282 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
9285 case WITH_RECORD_EXPR
:
9286 /* Put the object on the placeholder list, recurse through our first
9287 operand, and pop the list. */
9288 placeholder_list
= tree_cons (TREE_OPERAND (exp
, 1), NULL_TREE
,
9290 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
9291 placeholder_list
= TREE_CHAIN (placeholder_list
);
9295 /* This is never less insns than evaluating the PLUS_EXPR followed by
9296 a test and can be longer if the test is eliminated. */
9298 /* Reduce to minus. */
9299 exp
= build (MINUS_EXPR
, TREE_TYPE (exp
),
9300 TREE_OPERAND (exp
, 0),
9301 fold (build1 (NEGATE_EXPR
, TREE_TYPE (TREE_OPERAND (exp
, 1)),
9302 TREE_OPERAND (exp
, 1))));
9303 /* Process as MINUS. */
9307 /* Non-zero iff operands of minus differ. */
9308 do_compare_and_jump (build (NE_EXPR
, TREE_TYPE (exp
),
9309 TREE_OPERAND (exp
, 0),
9310 TREE_OPERAND (exp
, 1)),
9311 NE
, NE
, if_false_label
, if_true_label
);
      /* If we are AND'ing with a small constant, do this comparison in the
         smallest type that fits.  If the machine doesn't have comparisons
         that small, it will be converted back to the wider comparison.
         This helps if we are testing the sign bit of a narrower object.
         combine can't do this for us because it can't know whether a
         ZERO_EXTRACT or a compare in a smaller mode exists, but we do.  */
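      /* For instance, "(x & 0x80) != 0" with X a 32-bit value can be tested
         as a QImode comparison when the target has QImode compares, since
         only the low byte can be nonzero after the AND.  */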
      if (! SLOW_BYTE_ACCESS
          && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
          && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
          && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
          && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
          && (type = (*lang_hooks.types.type_for_mode) (mode, 1)) != 0
          && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
          && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
              != CODE_FOR_nothing))
        {
          do_jump (convert (type, exp), if_false_label, if_true_label);
          break;
        }
9337 case TRUTH_NOT_EXPR
:
9338 do_jump (TREE_OPERAND (exp
, 0), if_true_label
, if_false_label
);
9341 case TRUTH_ANDIF_EXPR
:
9342 if (if_false_label
== 0)
9343 if_false_label
= drop_through_label
= gen_label_rtx ();
9344 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, NULL_RTX
);
9345 start_cleanup_deferral ();
9346 do_jump (TREE_OPERAND (exp
, 1), if_false_label
, if_true_label
);
9347 end_cleanup_deferral ();
9350 case TRUTH_ORIF_EXPR
:
9351 if (if_true_label
== 0)
9352 if_true_label
= drop_through_label
= gen_label_rtx ();
9353 do_jump (TREE_OPERAND (exp
, 0), NULL_RTX
, if_true_label
);
9354 start_cleanup_deferral ();
9355 do_jump (TREE_OPERAND (exp
, 1), if_false_label
, if_true_label
);
9356 end_cleanup_deferral ();
9361 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, 0);
9362 preserve_temp_slots (NULL_RTX
);
9366 do_pending_stack_adjust ();
9367 do_jump (TREE_OPERAND (exp
, 1), if_false_label
, if_true_label
);
9373 case ARRAY_RANGE_REF
:
9375 HOST_WIDE_INT bitsize
, bitpos
;
9377 enum machine_mode mode
;
9382 /* Get description of this reference. We don't actually care
9383 about the underlying object here. */
9384 get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
, &mode
,
9385 &unsignedp
, &volatilep
);
9387 type
= (*lang_hooks
.types
.type_for_size
) (bitsize
, unsignedp
);
9388 if (! SLOW_BYTE_ACCESS
9389 && type
!= 0 && bitsize
>= 0
9390 && TYPE_PRECISION (type
) < TYPE_PRECISION (TREE_TYPE (exp
))
9391 && (cmp_optab
->handlers
[(int) TYPE_MODE (type
)].insn_code
9392 != CODE_FOR_nothing
))
9394 do_jump (convert (type
, exp
), if_false_label
, if_true_label
);
9401 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9402 if (integer_onep (TREE_OPERAND (exp
, 1))
9403 && integer_zerop (TREE_OPERAND (exp
, 2)))
9404 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
9406 else if (integer_zerop (TREE_OPERAND (exp
, 1))
9407 && integer_onep (TREE_OPERAND (exp
, 2)))
9408 do_jump (TREE_OPERAND (exp
, 0), if_true_label
, if_false_label
);
9412 rtx label1
= gen_label_rtx ();
9413 drop_through_label
= gen_label_rtx ();
9415 do_jump (TREE_OPERAND (exp
, 0), label1
, NULL_RTX
);
9417 start_cleanup_deferral ();
9418 /* Now the THEN-expression. */
9419 do_jump (TREE_OPERAND (exp
, 1),
9420 if_false_label
? if_false_label
: drop_through_label
,
9421 if_true_label
? if_true_label
: drop_through_label
);
9422 /* In case the do_jump just above never jumps. */
9423 do_pending_stack_adjust ();
9424 emit_label (label1
);
9426 /* Now the ELSE-expression. */
9427 do_jump (TREE_OPERAND (exp
, 2),
9428 if_false_label
? if_false_label
: drop_through_label
,
9429 if_true_label
? if_true_label
: drop_through_label
);
9430 end_cleanup_deferral ();
9436 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
9438 if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_FLOAT
9439 || GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_INT
)
9441 tree exp0
= save_expr (TREE_OPERAND (exp
, 0));
9442 tree exp1
= save_expr (TREE_OPERAND (exp
, 1));
9445 (build (TRUTH_ANDIF_EXPR
, TREE_TYPE (exp
),
9446 fold (build (EQ_EXPR
, TREE_TYPE (exp
),
9447 fold (build1 (REALPART_EXPR
,
9448 TREE_TYPE (inner_type
),
9450 fold (build1 (REALPART_EXPR
,
9451 TREE_TYPE (inner_type
),
9453 fold (build (EQ_EXPR
, TREE_TYPE (exp
),
9454 fold (build1 (IMAGPART_EXPR
,
9455 TREE_TYPE (inner_type
),
9457 fold (build1 (IMAGPART_EXPR
,
9458 TREE_TYPE (inner_type
),
9460 if_false_label
, if_true_label
);
9463 else if (integer_zerop (TREE_OPERAND (exp
, 1)))
9464 do_jump (TREE_OPERAND (exp
, 0), if_true_label
, if_false_label
);
9466 else if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_INT
9467 && !can_compare_p (EQ
, TYPE_MODE (inner_type
), ccp_jump
))
9468 do_jump_by_parts_equality (exp
, if_false_label
, if_true_label
);
9470 do_compare_and_jump (exp
, EQ
, EQ
, if_false_label
, if_true_label
);
9476 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
9478 if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_FLOAT
9479 || GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_INT
)
9481 tree exp0
= save_expr (TREE_OPERAND (exp
, 0));
9482 tree exp1
= save_expr (TREE_OPERAND (exp
, 1));
9485 (build (TRUTH_ORIF_EXPR
, TREE_TYPE (exp
),
9486 fold (build (NE_EXPR
, TREE_TYPE (exp
),
9487 fold (build1 (REALPART_EXPR
,
9488 TREE_TYPE (inner_type
),
9490 fold (build1 (REALPART_EXPR
,
9491 TREE_TYPE (inner_type
),
9493 fold (build (NE_EXPR
, TREE_TYPE (exp
),
9494 fold (build1 (IMAGPART_EXPR
,
9495 TREE_TYPE (inner_type
),
9497 fold (build1 (IMAGPART_EXPR
,
9498 TREE_TYPE (inner_type
),
9500 if_false_label
, if_true_label
);
9503 else if (integer_zerop (TREE_OPERAND (exp
, 1)))
9504 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
9506 else if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_INT
9507 && !can_compare_p (NE
, TYPE_MODE (inner_type
), ccp_jump
))
9508 do_jump_by_parts_equality (exp
, if_true_label
, if_false_label
);
9510 do_compare_and_jump (exp
, NE
, NE
, if_false_label
, if_true_label
);
9515 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
9516 if (GET_MODE_CLASS (mode
) == MODE_INT
9517 && ! can_compare_p (LT
, mode
, ccp_jump
))
9518 do_jump_by_parts_greater (exp
, 1, if_false_label
, if_true_label
);
9520 do_compare_and_jump (exp
, LT
, LTU
, if_false_label
, if_true_label
);
9524 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
9525 if (GET_MODE_CLASS (mode
) == MODE_INT
9526 && ! can_compare_p (LE
, mode
, ccp_jump
))
9527 do_jump_by_parts_greater (exp
, 0, if_true_label
, if_false_label
);
9529 do_compare_and_jump (exp
, LE
, LEU
, if_false_label
, if_true_label
);
9533 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
9534 if (GET_MODE_CLASS (mode
) == MODE_INT
9535 && ! can_compare_p (GT
, mode
, ccp_jump
))
9536 do_jump_by_parts_greater (exp
, 0, if_false_label
, if_true_label
);
9538 do_compare_and_jump (exp
, GT
, GTU
, if_false_label
, if_true_label
);
9542 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
9543 if (GET_MODE_CLASS (mode
) == MODE_INT
9544 && ! can_compare_p (GE
, mode
, ccp_jump
))
9545 do_jump_by_parts_greater (exp
, 1, if_true_label
, if_false_label
);
9547 do_compare_and_jump (exp
, GE
, GEU
, if_false_label
, if_true_label
);
9550 case UNORDERED_EXPR
:
9553 enum rtx_code cmp
, rcmp
;
9556 if (code
== UNORDERED_EXPR
)
9557 cmp
= UNORDERED
, rcmp
= ORDERED
;
9559 cmp
= ORDERED
, rcmp
= UNORDERED
;
9560 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
9563 if (! can_compare_p (cmp
, mode
, ccp_jump
)
9564 && (can_compare_p (rcmp
, mode
, ccp_jump
)
9565 /* If the target doesn't provide either UNORDERED or ORDERED
9566 comparisons, canonicalize on UNORDERED for the library. */
9567 || rcmp
== UNORDERED
))
9571 do_compare_and_jump (exp
, cmp
, cmp
, if_false_label
, if_true_label
);
9573 do_compare_and_jump (exp
, rcmp
, rcmp
, if_true_label
, if_false_label
);
9578 enum rtx_code rcode1
;
9579 enum tree_code tcode2
;
9603 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
9604 if (can_compare_p (rcode1
, mode
, ccp_jump
))
9605 do_compare_and_jump (exp
, rcode1
, rcode1
, if_false_label
,
9609 tree op0
= save_expr (TREE_OPERAND (exp
, 0));
9610 tree op1
= save_expr (TREE_OPERAND (exp
, 1));
9613 /* If the target doesn't support combined unordered
9614 compares, decompose into UNORDERED + comparison. */
9615 cmp0
= fold (build (UNORDERED_EXPR
, TREE_TYPE (exp
), op0
, op1
));
9616 cmp1
= fold (build (tcode2
, TREE_TYPE (exp
), op0
, op1
));
9617 exp
= build (TRUTH_ORIF_EXPR
, TREE_TYPE (exp
), cmp0
, cmp1
);
9618 do_jump (exp
, if_false_label
, if_true_label
);
9624 __builtin_expect (<test>, 0) and
9625 __builtin_expect (<test>, 1)
9627 We need to do this here, so that <test> is not converted to a SCC
9628 operation on machines that use condition code registers and COMPARE
9629 like the PowerPC, and then the jump is done based on whether the SCC
9630 operation produced a 1 or 0. */
9632 /* Check for a built-in function. */
9633 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ADDR_EXPR
)
9635 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
9636 tree arglist
= TREE_OPERAND (exp
, 1);
9638 if (TREE_CODE (fndecl
) == FUNCTION_DECL
9639 && DECL_BUILT_IN (fndecl
)
9640 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_EXPECT
9641 && arglist
!= NULL_TREE
9642 && TREE_CHAIN (arglist
) != NULL_TREE
)
9644 rtx seq
= expand_builtin_expect_jump (exp
, if_false_label
,
9647 if (seq
!= NULL_RTX
)
9654 /* fall through and generate the normal code. */
9658 temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, 0);
9660 /* This is not needed any more and causes poor code since it causes
9661 comparisons and tests from non-SI objects to have different code
9663 /* Copy to register to avoid generating bad insns by cse
9664 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9665 if (!cse_not_expected
&& GET_CODE (temp
) == MEM
)
9666 temp
= copy_to_reg (temp
);
9668 do_pending_stack_adjust ();
9669 /* Do any postincrements in the expression that was tested. */
9672 if (GET_CODE (temp
) == CONST_INT
9673 || (GET_CODE (temp
) == CONST_DOUBLE
&& GET_MODE (temp
) == VOIDmode
)
9674 || GET_CODE (temp
) == LABEL_REF
)
9676 rtx target
= temp
== const0_rtx
? if_false_label
: if_true_label
;
9680 else if (GET_MODE_CLASS (GET_MODE (temp
)) == MODE_INT
9681 && ! can_compare_p (NE
, GET_MODE (temp
), ccp_jump
))
9682 /* Note swapping the labels gives us not-equal. */
9683 do_jump_by_parts_equality_rtx (temp
, if_true_label
, if_false_label
);
9684 else if (GET_MODE (temp
) != VOIDmode
)
9685 do_compare_rtx_and_jump (temp
, CONST0_RTX (GET_MODE (temp
)),
9686 NE
, TREE_UNSIGNED (TREE_TYPE (exp
)),
9687 GET_MODE (temp
), NULL_RTX
,
9688 if_false_label
, if_true_label
);
9693 if (drop_through_label
)
9695 /* If do_jump produces code that might be jumped around,
9696 do any stack adjusts from that code, before the place
9697 where control merges in. */
9698 do_pending_stack_adjust ();
9699 emit_label (drop_through_label
);
/* Given a comparison expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.
   The code of EXP is ignored; we always test GT if SWAP is 0,
   and LT if SWAP is 1.  */
static void
do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
     tree exp;
     int swap;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));

  do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label,
                                if_true_label);
}
/* Compare OP0 with OP1, word at a time, in mode MODE.
   UNSIGNEDP says to do unsigned comparison.
   Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise.  */
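/* For instance, a DImode comparison on a 32-bit target is done as two
   word_mode compares: the high-order words decide unless they are equal,
   in which case the low-order words are compared as unsigned.  */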
void
do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label,
                              if_true_label)
     enum machine_mode mode;
     int unsignedp;
     rtx op0, op1;
     rtx if_false_label, if_true_label;
{
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int i;

  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    if_true_label = drop_through_label;
  if (! if_false_label)
    if_false_label = drop_through_label;

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
        {
          op0_word = operand_subword_force (op0, i, mode);
          op1_word = operand_subword_force (op1, i, mode);
        }
      else
        {
          op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
          op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
        }

      /* All but high-order word must be compared as unsigned.  */
      do_compare_rtx_and_jump (op0_word, op1_word, GT,
                               (unsignedp || i > 0), word_mode, NULL_RTX,
                               NULL_RTX, if_true_label);

      /* Consider lower words only if these are equal.  */
      do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
                               NULL_RTX, NULL_RTX, if_false_label);
    }

  if (if_false_label)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
/* Given an EQ_EXPR expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.  */

static void
do_jump_by_parts_equality (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  int i;
  rtx drop_through_label = 0;

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
                             operand_subword_force (op1, i, mode),
                             EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
                             word_mode, NULL_RTX, if_false_label, NULL_RTX);

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
/* Jump according to whether OP0 is 0.
   We assume that OP0 has an integer mode that is too wide
   for the available compare insns.  */

void
do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
     rtx op0;
     rtx if_false_label, if_true_label;
{
  int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
  rtx part;
  int i;
  rtx drop_through_label = 0;

  /* The fastest way of doing this comparison on almost any machine is to
     "or" all the words and compare the result.  If all have to be loaded
     from memory and this is a very wide item, it's possible this may
     be slower, but that's highly unlikely.  */
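  /* For instance, a DImode value on a 32-bit target becomes a single
     word_mode test of (high | low) against zero.  */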
  part = gen_reg_rtx (word_mode);
  emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
  for (i = 1; i < nwords && part != 0; i++)
    part = expand_binop (word_mode, ior_optab, part,
                         operand_subword_force (op0, i, GET_MODE (op0)),
                         part, 1, OPTAB_WIDEN);

  if (part != 0)
    {
      do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
                               NULL_RTX, if_false_label, if_true_label);
      return;
    }

  /* If we couldn't do the "or" simply, do this with a series of compares.  */
  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
                             const0_rtx, EQ, 1, word_mode, NULL_RTX,
                             if_false_label, NULL_RTX);

  if (if_true_label)
    emit_jump (if_true_label);

  if (drop_through_label)
    emit_label (drop_through_label);
}
/* Generate code for a comparison of OP0 and OP1 with rtx code CODE.
   (including code to compute the values to be compared)
   and set (CC0) according to the result.
   The decision as to signed or unsigned comparison must be made by the caller.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.  */

rtx
compare_from_rtx (op0, op1, code, unsignedp, mode, size)
     rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
{
  rtx tem;

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    return tem;

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
        op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  emit_cmp_insn (op0, op1, code, size, mode, unsignedp);

  return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
}
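/* As an illustration, when CODE is LT the rtx returned just above is
   (lt (cc0) (const_int 0)); a caller such as do_store_flag below extracts
   the (possibly swapped) code from that rtx and emits the matching
   conditional branch after emit_cmp_insn has set the condition code.  */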

/* Like do_compare_and_jump but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.  */

void
do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size,
                         if_false_label, if_true_label)
     rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     rtx if_false_label, if_true_label;
{
  rtx tem;
  int dummy_true_label = 0;

  /* Reverse the comparison if that is safe and we want to jump if it is
     false.  */
  if (! if_true_label && ! FLOAT_MODE_P (mode))
    {
      if_true_label = if_false_label;
      if_false_label = 0;
      code = reverse_condition (code);
    }

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    {
      if (tem == const_true_rtx)
        {
          if (if_true_label)
            emit_jump (if_true_label);
        }
      else
        {
          if (if_false_label)
            emit_jump (if_false_label);
        }
      return;
    }

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
        op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  if (! if_true_label)
    {
      dummy_true_label = 1;
      if_true_label = gen_label_rtx ();
    }

  emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
                           if_true_label);

  if (if_false_label)
    emit_jump (if_false_label);
  if (dummy_true_label)
    emit_label (if_true_label);
}
/* Generate code for a comparison expression EXP (including code to compute
   the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
   IF_TRUE_LABEL.  One of the labels can be NULL_RTX, in which case the
   generated code will drop through.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */

static void
do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
                     if_true_label)
     tree exp;
     enum rtx_code signed_code, unsigned_code;
     rtx if_false_label, if_true_label;
{
  rtx op0, op1;
  tree type;
  enum machine_mode mode;
  int unsignedp;
  enum rtx_code code;

  /* Don't crash if the comparison was erroneous.  */
  op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
    return;

  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
    return;

  type = TREE_TYPE (TREE_OPERAND (exp, 0));
  mode = TYPE_MODE (type);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
      && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
          || (GET_MODE_BITSIZE (mode)
              > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
                                                                      1)))))))
    {
      /* op0 might have been replaced by promoted constant, in which
         case the type of second argument should be used.  */
      type = TREE_TYPE (TREE_OPERAND (exp, 1));
      mode = TYPE_MODE (type);
    }
  unsignedp = TREE_UNSIGNED (type);
  code = unsignedp ? unsigned_code : signed_code;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* If function pointers need to be "canonicalized" before they can
     be reliably compared, then canonicalize them.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
          == FUNCTION_TYPE))
    {
      rtx new_op0 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;
    }

  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
          == FUNCTION_TYPE))
    {
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }
#endif

  /* Do any postincrements in the expression that was tested.  */
  emit_queue ();

  do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
                           ((mode == BLKmode)
                            ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
                           if_false_label, if_true_label);
}
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is non-zero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */
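/* Roughly, for a source-level test such as `x = (a < b)' the set/jump/set
   fallback mentioned above amounts to
       x = 1;  if (a < b) goto L;  x = 0;  L:;
   (with the two constants interchanged when the result must be inverted);
   the tail of this function emits exactly that shape when emit_store_flag
   cannot handle the comparison directly.  */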

static rtx
do_store_flag (exp, target, mode, only_cheap)
     tree exp;
     rtx target;
     enum machine_mode mode;
     int only_cheap;
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
           && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
               == FUNCTION_TYPE))
          || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
              && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
                  == FUNCTION_TYPE))))
    return 0;
#endif

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */
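  /* For example, the signed test `x > -1' becomes `x >= 0' here, and the
     unsigned test `x < 1' becomes `x <= 0', so the zero-comparison special
     cases further down do not need to recognize 1 and -1 themselves.  */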

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
        code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = LT;
      else
        code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = GE;
      else
        code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
        code = unsignedp ? GEU : GE;
      break;

    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    case ORDERED_EXPR:
      code = ORDERED;
      break;
    case UNLT_EXPR:
      code = UNLT;
      break;
    case UNLE_EXPR:
      code = UNLE;
      break;
    case UNGT_EXPR:
      code = UNGT;
      break;
    case UNGE_EXPR:
      code = UNGE;
      break;
    case UNEQ_EXPR:
      code = UNEQ;
      break;

    default:
      abort ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */
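  /* For example, `(x & 8) != 0' is computed below as `(x >> 3) & 1', and
     `(x & 8) == 0' additionally xors that value with 1, so no scc
     instruction is required.  */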

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      int ops_unsignedp;

      /* If INNER is a right shift of a constant and it plus BITNUM does
         not overflow, adjust BITNUM and INNER.  */
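      /* E.g. for `((x >> 3) & 4) != 0', bit 2 of (x >> 3) is really bit 5
         of x, so BITNUM becomes 5 and INNER becomes x; the single shift
         emitted below then operates on the original operand.  */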

      if (TREE_CODE (inner) == RSHIFT_EXPR
          && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
          && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
          && bitnum < TYPE_PRECISION (type)
          && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
                                   bitnum - TYPE_PRECISION (type)))
        {
          bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
          inner = TREE_OPERAND (inner, 0);
        }

      /* If we are going to be able to omit the AND below, we must do our
         operations as unsigned.  If we must use the AND, we have a choice.
         Normally unsigned is faster, but for some machines signed is.  */
      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
#ifdef LOAD_EXTEND_OP
                       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
#else
                       : 1
#endif
                       );

      if (! get_subtarget (subtarget)
          || GET_MODE (subtarget) != operand_mode
          || ! safe_from_p (subtarget, inner, 1))
        subtarget = 0;

      op0 = expand_expr (inner, subtarget, VOIDmode, 0);

      if (bitnum != 0)
        op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
                            size_int (bitnum), subtarget, ops_unsignedp);

      if (GET_MODE (op0) != mode)
        op0 = convert_to_mode (mode, op0, ops_unsignedp);

      if ((code == EQ && ! invert) || (code == NE && invert))
        op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
                            ops_unsignedp, OPTAB_LIB_WIDEN);

      /* Put the AND last so it can combine with more things.  */
      if (bitnum != TYPE_PRECISION (type) - 1)
        op0 = expand_and (mode, op0, const1_rtx, subtarget);

      return op0;
    }

  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (code, operand_mode, ccp_store_flag))
    return 0;

  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
    {
      /* We can only do this if it is one of the special cases that
         can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
          || (! only_cheap && code == GE && integer_zerop (arg1)))
        ;
      else if (BRANCH_COST >= 0
               && ! only_cheap && (code == NE || code == EQ)
               && TREE_CODE (type) != REAL_TYPE
               && ((abs_optab->handlers[(int) operand_mode].insn_code
                    != CODE_FOR_nothing)
                   || (ffs_optab->handlers[(int) operand_mode].insn_code
                       != CODE_FOR_nothing)))
        ;
      else
        return 0;
    }

  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1, 1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if the emit_store_flag does anything it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
                            queued_subexp_p (op0) ? copy_rtx (op0) : op0,
                            queued_subexp_p (op1) ? copy_rtx (op1) : op1,
                            operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
        result = expand_binop (mode, xor_optab, result, const1_rtx,
                               result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
                             operand_mode, NULL_RTX);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
             || (result != const0_rtx && invert))
            ? const0_rtx : const1_rtx);

  /* The code of RESULT may not match CODE if compare_from_rtx
     decided to swap its operands and reverse the original code.

     We know that compare_from_rtx returns either a CONST_INT or
     a new comparison code, so it is safe to just extract the
     code from RESULT.  */
  code = GET_CODE (result);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif

/* If the machine does not have a case insn that compares the bounds,
   this means extra overhead for dispatch tables, which raises the
   threshold for using them.  */
#ifndef CASE_VALUES_THRESHOLD
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#endif /* CASE_VALUES_THRESHOLD */
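/* In other words, with a casesi pattern a switch statement roughly needs
   at least four case values before the switch expander will consider a
   dispatch table (all other conditions being equal); without casesi the
   explicit bounds check adds overhead, so the default threshold is five.  */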

unsigned int
case_values_threshold ()
{
  return CASE_VALUES_THRESHOLD;
}

/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */
int
try_casesi (index_type, index_expr, minval, range,
            table_label, default_label)
     tree index_type, index_expr, minval, range;
     rtx table_label ATTRIBUTE_UNUSED;
     rtx default_label;
{
  enum machine_mode index_mode = SImode;
  int index_bits = GET_MODE_BITSIZE (index_mode);
  rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    return 0;

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build (MINUS_EXPR, index_type,
                          index_expr, minval);
      minval = integer_zero_node;
      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
                               omode, 1, default_label);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
        {
          index_expr = convert ((*lang_hooks.types.type_for_size)
                                (index_bits, 0), index_expr);
          index_type = TREE_TYPE (index_expr);
        }

      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
    }
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);

  op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
                       op1, TREE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);

  op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
                       op2, TREE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);

  emit_jump_insn (gen_casesi (index, op1, op2,
                              table_label, default_label));
  return 1;
}
/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif

/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

static void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  rtx temp, vector;

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */
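  /* For instance, for a switch whose case values run from 5 to 10 the
     caller has already computed INDEX = i - 5 and RANGE is 5; the single
     unsigned test INDEX > RANGE catches both i < 5 (the subtraction wraps
     to a huge unsigned value) and i > 10.  */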

  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
                           default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip thru, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
                        gen_rtx_MULT (Pmode, index,
                                      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
                        gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}

int
try_tablejump (index_type, index_expr, minval, range,
               table_label, default_label)
     tree index_type, index_expr, minval, range;
     rtx table_label, default_label;
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold (build (MINUS_EXPR, index_type,
                            convert (index_type, index_expr),
                            convert (index_type, minval)));
  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
                convert_modes (TYPE_MODE (index_type),
                               TYPE_MODE (TREE_TYPE (range)),
                               expand_expr (range, NULL_RTX,
                                            VOIDmode, 0),
                               TREE_UNSIGNED (TREE_TYPE (range))),
                table_label, default_label);