1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
24 #include "coretypes.h"
32 #include "hard-reg-set.h"
35 #include "insn-config.h"
36 #include "insn-attr.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
44 #include "typeclass.h"
47 #include "langhooks.h"
51 /* Decide whether a function's arguments should be processed
52 from first to last or from last to first.
54 They should if the stack and args grow in opposite directions, but
55 only if we have push insns. */
59 #ifndef PUSH_ARGS_REVERSED
60 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
61 #define PUSH_ARGS_REVERSED /* If it's last to first. */
67 #ifndef STACK_PUSH_CODE
68 #ifdef STACK_GROWS_DOWNWARD
69 #define STACK_PUSH_CODE PRE_DEC
71 #define STACK_PUSH_CODE PRE_INC
75 /* Assume that case vectors are not pc-relative. */
76 #ifndef CASE_VECTOR_PC_RELATIVE
77 #define CASE_VECTOR_PC_RELATIVE 0
80 /* Convert defined/undefined to boolean. */
81 #ifdef TARGET_MEM_FUNCTIONS
82 #undef TARGET_MEM_FUNCTIONS
83 #define TARGET_MEM_FUNCTIONS 1
85 #define TARGET_MEM_FUNCTIONS 0
89 /* If this is nonzero, we do not bother generating VOLATILE
90 around volatile memory references, and we are willing to
91 output indirect addresses. If cse is to follow, we reject
92 indirect addresses so a useful potential cse is generated;
93 if it is used only once, instruction combination will produce
94 the same indirect address eventually. */
97 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
98 tree placeholder_list
= 0;
100 /* This structure is used by move_by_pieces to describe the move to
102 struct move_by_pieces
111 int explicit_inc_from
;
112 unsigned HOST_WIDE_INT len
;
113 HOST_WIDE_INT offset
;
117 /* This structure is used by store_by_pieces to describe the clear to
120 struct store_by_pieces
126 unsigned HOST_WIDE_INT len
;
127 HOST_WIDE_INT offset
;
128 rtx (*constfun
) (void *, HOST_WIDE_INT
, enum machine_mode
);
133 static rtx
enqueue_insn (rtx
, rtx
);
134 static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT
,
136 static void move_by_pieces_1 (rtx (*) (rtx
, ...), enum machine_mode
,
137 struct move_by_pieces
*);
138 static bool block_move_libcall_safe_for_call_parm (void);
139 static bool emit_block_move_via_movstr (rtx
, rtx
, rtx
, unsigned);
140 static rtx
emit_block_move_via_libcall (rtx
, rtx
, rtx
);
141 static tree
emit_block_move_libcall_fn (int);
142 static void emit_block_move_via_loop (rtx
, rtx
, rtx
, unsigned);
143 static rtx
clear_by_pieces_1 (void *, HOST_WIDE_INT
, enum machine_mode
);
144 static void clear_by_pieces (rtx
, unsigned HOST_WIDE_INT
, unsigned int);
145 static void store_by_pieces_1 (struct store_by_pieces
*, unsigned int);
146 static void store_by_pieces_2 (rtx (*) (rtx
, ...), enum machine_mode
,
147 struct store_by_pieces
*);
148 static bool clear_storage_via_clrstr (rtx
, rtx
, unsigned);
149 static rtx
clear_storage_via_libcall (rtx
, rtx
);
150 static tree
clear_storage_libcall_fn (int);
151 static rtx
compress_float_constant (rtx
, rtx
);
152 static rtx
get_subtarget (rtx
);
153 static int is_zeros_p (tree
);
154 static void store_constructor_field (rtx
, unsigned HOST_WIDE_INT
,
155 HOST_WIDE_INT
, enum machine_mode
,
156 tree
, tree
, int, int);
157 static void store_constructor (tree
, rtx
, int, HOST_WIDE_INT
);
158 static rtx
store_field (rtx
, HOST_WIDE_INT
, HOST_WIDE_INT
, enum machine_mode
,
159 tree
, enum machine_mode
, int, tree
, int);
160 static rtx
var_rtx (tree
);
162 static unsigned HOST_WIDE_INT
highest_pow2_factor (tree
);
163 static unsigned HOST_WIDE_INT
highest_pow2_factor_for_type (tree
, tree
);
165 static int is_aligning_offset (tree
, tree
);
166 static rtx
expand_increment (tree
, int, int);
167 static rtx
do_store_flag (tree
, rtx
, enum machine_mode
, int);
169 static void emit_single_push_insn (enum machine_mode
, rtx
, tree
);
171 static void do_tablejump (rtx
, enum machine_mode
, rtx
, rtx
, rtx
);
172 static rtx
const_vector_from_tree (tree
);
174 /* Record for each mode whether we can move a register directly to or
175 from an object of that mode in memory. If we can't, we won't try
176 to use that mode directly when accessing a field of that mode. */
178 static char direct_load
[NUM_MACHINE_MODES
];
179 static char direct_store
[NUM_MACHINE_MODES
];
181 /* Record for each mode whether we can float-extend from memory. */
183 static bool float_extend_from_mem
[NUM_MACHINE_MODES
][NUM_MACHINE_MODES
];
185 /* If a memory-to-memory move would take MOVE_RATIO or more simple
186 move-instruction sequences, we will do a movstr or libcall instead. */
189 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
192 /* If we are optimizing for space (-Os), cut down the default move ratio. */
193 #define MOVE_RATIO (optimize_size ? 3 : 15)
197 /* This macro is used to determine whether move_by_pieces should be called
198 to perform a structure copy. */
199 #ifndef MOVE_BY_PIECES_P
200 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
201 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
204 /* If a clear memory operation would take CLEAR_RATIO or more simple
205 move-instruction sequences, we will do a clrstr or libcall instead. */
208 #if defined (HAVE_clrstrqi) || defined (HAVE_clrstrhi) || defined (HAVE_clrstrsi) || defined (HAVE_clrstrdi) || defined (HAVE_clrstrti)
209 #define CLEAR_RATIO 2
211 /* If we are optimizing for space, cut down the default clear ratio. */
212 #define CLEAR_RATIO (optimize_size ? 3 : 15)
216 /* This macro is used to determine whether clear_by_pieces should be
217 called to clear storage. */
218 #ifndef CLEAR_BY_PIECES_P
219 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
220 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
223 /* This macro is used to determine whether store_by_pieces should be
224 called to "memset" storage with byte values other than zero, or
225 to "memcpy" storage when the source is a constant string. */
226 #ifndef STORE_BY_PIECES_P
227 #define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
230 /* This array records the insn_code of insns to perform block moves. */
231 enum insn_code movstr_optab
[NUM_MACHINE_MODES
];
233 /* This array records the insn_code of insns to perform block clears. */
234 enum insn_code clrstr_optab
[NUM_MACHINE_MODES
];
236 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
238 #ifndef SLOW_UNALIGNED_ACCESS
239 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
242 /* This is run once per compilation to set up which modes can be used
243 directly in memory and to initialize the block move optab. */
246 init_expr_once (void)
249 enum machine_mode mode
;
254 /* Try indexing by frame ptr and try by stack ptr.
255 It is known that on the Convex the stack ptr isn't a valid index.
256 With luck, one or the other is valid on any machine. */
257 mem
= gen_rtx_MEM (VOIDmode
, stack_pointer_rtx
);
258 mem1
= gen_rtx_MEM (VOIDmode
, frame_pointer_rtx
);
260 /* A scratch register we can modify in-place below to avoid
261 useless RTL allocations. */
262 reg
= gen_rtx_REG (VOIDmode
, -1);
264 insn
= rtx_alloc (INSN
);
265 pat
= gen_rtx_SET (0, NULL_RTX
, NULL_RTX
);
266 PATTERN (insn
) = pat
;
268 for (mode
= VOIDmode
; (int) mode
< NUM_MACHINE_MODES
;
269 mode
= (enum machine_mode
) ((int) mode
+ 1))
273 direct_load
[(int) mode
] = direct_store
[(int) mode
] = 0;
274 PUT_MODE (mem
, mode
);
275 PUT_MODE (mem1
, mode
);
276 PUT_MODE (reg
, mode
);
278 /* See if there is some register that can be used in this mode and
279 directly loaded or stored from memory. */
281 if (mode
!= VOIDmode
&& mode
!= BLKmode
)
282 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
283 && (direct_load
[(int) mode
] == 0 || direct_store
[(int) mode
] == 0);
286 if (! HARD_REGNO_MODE_OK (regno
, mode
))
292 SET_DEST (pat
) = reg
;
293 if (recog (pat
, insn
, &num_clobbers
) >= 0)
294 direct_load
[(int) mode
] = 1;
296 SET_SRC (pat
) = mem1
;
297 SET_DEST (pat
) = reg
;
298 if (recog (pat
, insn
, &num_clobbers
) >= 0)
299 direct_load
[(int) mode
] = 1;
302 SET_DEST (pat
) = mem
;
303 if (recog (pat
, insn
, &num_clobbers
) >= 0)
304 direct_store
[(int) mode
] = 1;
307 SET_DEST (pat
) = mem1
;
308 if (recog (pat
, insn
, &num_clobbers
) >= 0)
309 direct_store
[(int) mode
] = 1;
313 mem
= gen_rtx_MEM (VOIDmode
, gen_rtx_raw_REG (Pmode
, 10000));
315 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_FLOAT
); mode
!= VOIDmode
;
316 mode
= GET_MODE_WIDER_MODE (mode
))
318 enum machine_mode srcmode
;
319 for (srcmode
= GET_CLASS_NARROWEST_MODE (MODE_FLOAT
); srcmode
!= mode
;
320 srcmode
= GET_MODE_WIDER_MODE (srcmode
))
324 ic
= can_extend_p (mode
, srcmode
, 0);
325 if (ic
== CODE_FOR_nothing
)
328 PUT_MODE (mem
, srcmode
);
330 if ((*insn_data
[ic
].operand
[1].predicate
) (mem
, srcmode
))
331 float_extend_from_mem
[mode
][srcmode
] = true;
336 /* This is run at the start of compiling a function. */
341 cfun
->expr
= ggc_alloc_cleared (sizeof (struct expr_status
));
344 /* Small sanity check that the queue is empty at the end of a function. */
347 finish_expr_for_function (void)
353 /* Manage the queue of increment instructions to be output
354 for POSTINCREMENT_EXPR expressions, etc. */
356 /* Queue up to increment (or change) VAR later. BODY says how:
357 BODY should be the same thing you would pass to emit_insn
358 to increment right away. It will go to emit_insn later on.
360 The value is a QUEUED expression to be used in place of VAR
361 where you want to guarantee the pre-incrementation value of VAR. */
364 enqueue_insn (rtx var
, rtx body
)
366 pending_chain
= gen_rtx_QUEUED (GET_MODE (var
), var
, NULL_RTX
, NULL_RTX
,
367 body
, pending_chain
);
368 return pending_chain
;
371 /* Use protect_from_queue to convert a QUEUED expression
372 into something that you can put immediately into an instruction.
373 If the queued incrementation has not happened yet,
374 protect_from_queue returns the variable itself.
375 If the incrementation has happened, protect_from_queue returns a temp
376 that contains a copy of the old value of the variable.
378 Any time an rtx which might possibly be a QUEUED is to be put
379 into an instruction, it must be passed through protect_from_queue first.
380 QUEUED expressions are not meaningful in instructions.
382 Do not pass a value through protect_from_queue and then hold
383 on to it for a while before putting it in an instruction!
384 If the queue is flushed in between, incorrect code will result. */
387 protect_from_queue (rtx x
, int modify
)
389 RTX_CODE code
= GET_CODE (x
);
391 #if 0 /* A QUEUED can hang around after the queue is forced out. */
392 /* Shortcut for most common case. */
393 if (pending_chain
== 0)
399 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
400 use of autoincrement. Make a copy of the contents of the memory
401 location rather than a copy of the address, but not if the value is
402 of mode BLKmode. Don't modify X in place since it might be
404 if (code
== MEM
&& GET_MODE (x
) != BLKmode
405 && GET_CODE (XEXP (x
, 0)) == QUEUED
&& !modify
)
408 rtx
new = replace_equiv_address_nv (x
, QUEUED_VAR (y
));
412 rtx temp
= gen_reg_rtx (GET_MODE (x
));
414 emit_insn_before (gen_move_insn (temp
, new),
419 /* Copy the address into a pseudo, so that the returned value
420 remains correct across calls to emit_queue. */
421 return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
424 /* Otherwise, recursively protect the subexpressions of all
425 the kinds of rtx's that can contain a QUEUED. */
428 rtx tem
= protect_from_queue (XEXP (x
, 0), 0);
429 if (tem
!= XEXP (x
, 0))
435 else if (code
== PLUS
|| code
== MULT
)
437 rtx new0
= protect_from_queue (XEXP (x
, 0), 0);
438 rtx new1
= protect_from_queue (XEXP (x
, 1), 0);
439 if (new0
!= XEXP (x
, 0) || new1
!= XEXP (x
, 1))
448 /* If the increment has not happened, use the variable itself. Copy it
449 into a new pseudo so that the value remains correct across calls to
451 if (QUEUED_INSN (x
) == 0)
452 return copy_to_reg (QUEUED_VAR (x
));
453 /* If the increment has happened and a pre-increment copy exists,
455 if (QUEUED_COPY (x
) != 0)
456 return QUEUED_COPY (x
);
457 /* The increment has happened but we haven't set up a pre-increment copy.
458 Set one up now, and use it. */
459 QUEUED_COPY (x
) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x
)));
460 emit_insn_before (gen_move_insn (QUEUED_COPY (x
), QUEUED_VAR (x
)),
462 return QUEUED_COPY (x
);
465 /* Return nonzero if X contains a QUEUED expression:
466 if it contains anything that will be altered by a queued increment.
467 We handle only combinations of MEM, PLUS, MINUS and MULT operators
468 since memory addresses generally contain only those. */
471 queued_subexp_p (rtx x
)
473 enum rtx_code code
= GET_CODE (x
);
479 return queued_subexp_p (XEXP (x
, 0));
483 return (queued_subexp_p (XEXP (x
, 0))
484 || queued_subexp_p (XEXP (x
, 1)));
490 /* Perform all the pending incrementations. */
496 while ((p
= pending_chain
))
498 rtx body
= QUEUED_BODY (p
);
500 switch (GET_CODE (body
))
508 QUEUED_INSN (p
) = body
;
512 #ifdef ENABLE_CHECKING
519 QUEUED_INSN (p
) = emit_insn (body
);
523 pending_chain
= QUEUED_NEXT (p
);
527 /* Copy data from FROM to TO, where the machine modes are not the same.
528 Both modes may be integer, or both may be floating.
529 UNSIGNEDP should be nonzero if FROM is an unsigned type.
530 This causes zero-extension instead of sign-extension. */
533 convert_move (rtx to
, rtx from
, int unsignedp
)
535 enum machine_mode to_mode
= GET_MODE (to
);
536 enum machine_mode from_mode
= GET_MODE (from
);
537 int to_real
= GET_MODE_CLASS (to_mode
) == MODE_FLOAT
;
538 int from_real
= GET_MODE_CLASS (from_mode
) == MODE_FLOAT
;
542 /* rtx code for making an equivalent value. */
543 enum rtx_code equiv_code
= (unsignedp
< 0 ? UNKNOWN
544 : (unsignedp
? ZERO_EXTEND
: SIGN_EXTEND
));
546 to
= protect_from_queue (to
, 1);
547 from
= protect_from_queue (from
, 0);
549 if (to_real
!= from_real
)
552 /* If FROM is a SUBREG that indicates that we have already done at least
553 the required extension, strip it. We don't handle such SUBREGs as
556 if (GET_CODE (from
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (from
)
557 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from
)))
558 >= GET_MODE_SIZE (to_mode
))
559 && SUBREG_PROMOTED_UNSIGNED_P (from
) == unsignedp
)
560 from
= gen_lowpart (to_mode
, from
), from_mode
= to_mode
;
562 if (GET_CODE (to
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (to
))
565 if (to_mode
== from_mode
566 || (from_mode
== VOIDmode
&& CONSTANT_P (from
)))
568 emit_move_insn (to
, from
);
572 if (VECTOR_MODE_P (to_mode
) || VECTOR_MODE_P (from_mode
))
574 if (GET_MODE_BITSIZE (from_mode
) != GET_MODE_BITSIZE (to_mode
))
577 if (VECTOR_MODE_P (to_mode
))
578 from
= simplify_gen_subreg (to_mode
, from
, GET_MODE (from
), 0);
580 to
= simplify_gen_subreg (from_mode
, to
, GET_MODE (to
), 0);
582 emit_move_insn (to
, from
);
586 if (GET_CODE (to
) == CONCAT
&& GET_CODE (from
) == CONCAT
)
588 convert_move (XEXP (to
, 0), XEXP (from
, 0), unsignedp
);
589 convert_move (XEXP (to
, 1), XEXP (from
, 1), unsignedp
);
593 if (to_real
!= from_real
)
600 if (GET_MODE_BITSIZE (from_mode
) < GET_MODE_BITSIZE (to_mode
))
602 /* Try converting directly if the insn is supported. */
603 if ((code
= can_extend_p (to_mode
, from_mode
, 0))
606 emit_unop_insn (code
, to
, from
, UNKNOWN
);
611 #ifdef HAVE_trunchfqf2
612 if (HAVE_trunchfqf2
&& from_mode
== HFmode
&& to_mode
== QFmode
)
614 emit_unop_insn (CODE_FOR_trunchfqf2
, to
, from
, UNKNOWN
);
618 #ifdef HAVE_trunctqfqf2
619 if (HAVE_trunctqfqf2
&& from_mode
== TQFmode
&& to_mode
== QFmode
)
621 emit_unop_insn (CODE_FOR_trunctqfqf2
, to
, from
, UNKNOWN
);
625 #ifdef HAVE_truncsfqf2
626 if (HAVE_truncsfqf2
&& from_mode
== SFmode
&& to_mode
== QFmode
)
628 emit_unop_insn (CODE_FOR_truncsfqf2
, to
, from
, UNKNOWN
);
632 #ifdef HAVE_truncdfqf2
633 if (HAVE_truncdfqf2
&& from_mode
== DFmode
&& to_mode
== QFmode
)
635 emit_unop_insn (CODE_FOR_truncdfqf2
, to
, from
, UNKNOWN
);
639 #ifdef HAVE_truncxfqf2
640 if (HAVE_truncxfqf2
&& from_mode
== XFmode
&& to_mode
== QFmode
)
642 emit_unop_insn (CODE_FOR_truncxfqf2
, to
, from
, UNKNOWN
);
646 #ifdef HAVE_trunctfqf2
647 if (HAVE_trunctfqf2
&& from_mode
== TFmode
&& to_mode
== QFmode
)
649 emit_unop_insn (CODE_FOR_trunctfqf2
, to
, from
, UNKNOWN
);
654 #ifdef HAVE_trunctqfhf2
655 if (HAVE_trunctqfhf2
&& from_mode
== TQFmode
&& to_mode
== HFmode
)
657 emit_unop_insn (CODE_FOR_trunctqfhf2
, to
, from
, UNKNOWN
);
661 #ifdef HAVE_truncsfhf2
662 if (HAVE_truncsfhf2
&& from_mode
== SFmode
&& to_mode
== HFmode
)
664 emit_unop_insn (CODE_FOR_truncsfhf2
, to
, from
, UNKNOWN
);
668 #ifdef HAVE_truncdfhf2
669 if (HAVE_truncdfhf2
&& from_mode
== DFmode
&& to_mode
== HFmode
)
671 emit_unop_insn (CODE_FOR_truncdfhf2
, to
, from
, UNKNOWN
);
675 #ifdef HAVE_truncxfhf2
676 if (HAVE_truncxfhf2
&& from_mode
== XFmode
&& to_mode
== HFmode
)
678 emit_unop_insn (CODE_FOR_truncxfhf2
, to
, from
, UNKNOWN
);
682 #ifdef HAVE_trunctfhf2
683 if (HAVE_trunctfhf2
&& from_mode
== TFmode
&& to_mode
== HFmode
)
685 emit_unop_insn (CODE_FOR_trunctfhf2
, to
, from
, UNKNOWN
);
690 #ifdef HAVE_truncsftqf2
691 if (HAVE_truncsftqf2
&& from_mode
== SFmode
&& to_mode
== TQFmode
)
693 emit_unop_insn (CODE_FOR_truncsftqf2
, to
, from
, UNKNOWN
);
697 #ifdef HAVE_truncdftqf2
698 if (HAVE_truncdftqf2
&& from_mode
== DFmode
&& to_mode
== TQFmode
)
700 emit_unop_insn (CODE_FOR_truncdftqf2
, to
, from
, UNKNOWN
);
704 #ifdef HAVE_truncxftqf2
705 if (HAVE_truncxftqf2
&& from_mode
== XFmode
&& to_mode
== TQFmode
)
707 emit_unop_insn (CODE_FOR_truncxftqf2
, to
, from
, UNKNOWN
);
711 #ifdef HAVE_trunctftqf2
712 if (HAVE_trunctftqf2
&& from_mode
== TFmode
&& to_mode
== TQFmode
)
714 emit_unop_insn (CODE_FOR_trunctftqf2
, to
, from
, UNKNOWN
);
719 #ifdef HAVE_truncdfsf2
720 if (HAVE_truncdfsf2
&& from_mode
== DFmode
&& to_mode
== SFmode
)
722 emit_unop_insn (CODE_FOR_truncdfsf2
, to
, from
, UNKNOWN
);
726 #ifdef HAVE_truncxfsf2
727 if (HAVE_truncxfsf2
&& from_mode
== XFmode
&& to_mode
== SFmode
)
729 emit_unop_insn (CODE_FOR_truncxfsf2
, to
, from
, UNKNOWN
);
733 #ifdef HAVE_trunctfsf2
734 if (HAVE_trunctfsf2
&& from_mode
== TFmode
&& to_mode
== SFmode
)
736 emit_unop_insn (CODE_FOR_trunctfsf2
, to
, from
, UNKNOWN
);
740 #ifdef HAVE_truncxfdf2
741 if (HAVE_truncxfdf2
&& from_mode
== XFmode
&& to_mode
== DFmode
)
743 emit_unop_insn (CODE_FOR_truncxfdf2
, to
, from
, UNKNOWN
);
747 #ifdef HAVE_trunctfdf2
748 if (HAVE_trunctfdf2
&& from_mode
== TFmode
&& to_mode
== DFmode
)
750 emit_unop_insn (CODE_FOR_trunctfdf2
, to
, from
, UNKNOWN
);
762 libcall
= extendsfdf2_libfunc
;
766 libcall
= extendsfxf2_libfunc
;
770 libcall
= extendsftf2_libfunc
;
782 libcall
= truncdfsf2_libfunc
;
786 libcall
= extenddfxf2_libfunc
;
790 libcall
= extenddftf2_libfunc
;
802 libcall
= truncxfsf2_libfunc
;
806 libcall
= truncxfdf2_libfunc
;
818 libcall
= trunctfsf2_libfunc
;
822 libcall
= trunctfdf2_libfunc
;
834 if (libcall
== (rtx
) 0)
835 /* This conversion is not implemented yet. */
839 value
= emit_library_call_value (libcall
, NULL_RTX
, LCT_CONST
, to_mode
,
841 insns
= get_insns ();
843 emit_libcall_block (insns
, to
, value
, gen_rtx_FLOAT_TRUNCATE (to_mode
,
848 /* Now both modes are integers. */
850 /* Handle expanding beyond a word. */
851 if (GET_MODE_BITSIZE (from_mode
) < GET_MODE_BITSIZE (to_mode
)
852 && GET_MODE_BITSIZE (to_mode
) > BITS_PER_WORD
)
859 enum machine_mode lowpart_mode
;
860 int nwords
= CEIL (GET_MODE_SIZE (to_mode
), UNITS_PER_WORD
);
862 /* Try converting directly if the insn is supported. */
863 if ((code
= can_extend_p (to_mode
, from_mode
, unsignedp
))
866 /* If FROM is a SUBREG, put it into a register. Do this
867 so that we always generate the same set of insns for
868 better cse'ing; if an intermediate assignment occurred,
869 we won't be doing the operation directly on the SUBREG. */
870 if (optimize
> 0 && GET_CODE (from
) == SUBREG
)
871 from
= force_reg (from_mode
, from
);
872 emit_unop_insn (code
, to
, from
, equiv_code
);
875 /* Next, try converting via full word. */
876 else if (GET_MODE_BITSIZE (from_mode
) < BITS_PER_WORD
877 && ((code
= can_extend_p (to_mode
, word_mode
, unsignedp
))
878 != CODE_FOR_nothing
))
880 if (GET_CODE (to
) == REG
)
881 emit_insn (gen_rtx_CLOBBER (VOIDmode
, to
));
882 convert_move (gen_lowpart (word_mode
, to
), from
, unsignedp
);
883 emit_unop_insn (code
, to
,
884 gen_lowpart (word_mode
, to
), equiv_code
);
888 /* No special multiword conversion insn; do it by hand. */
891 /* Since we will turn this into a no conflict block, we must ensure
892 that the source does not overlap the target. */
894 if (reg_overlap_mentioned_p (to
, from
))
895 from
= force_reg (from_mode
, from
);
897 /* Get a copy of FROM widened to a word, if necessary. */
898 if (GET_MODE_BITSIZE (from_mode
) < BITS_PER_WORD
)
899 lowpart_mode
= word_mode
;
901 lowpart_mode
= from_mode
;
903 lowfrom
= convert_to_mode (lowpart_mode
, from
, unsignedp
);
905 lowpart
= gen_lowpart (lowpart_mode
, to
);
906 emit_move_insn (lowpart
, lowfrom
);
908 /* Compute the value to put in each remaining word. */
910 fill_value
= const0_rtx
;
915 && insn_data
[(int) CODE_FOR_slt
].operand
[0].mode
== word_mode
916 && STORE_FLAG_VALUE
== -1)
918 emit_cmp_insn (lowfrom
, const0_rtx
, NE
, NULL_RTX
,
920 fill_value
= gen_reg_rtx (word_mode
);
921 emit_insn (gen_slt (fill_value
));
927 = expand_shift (RSHIFT_EXPR
, lowpart_mode
, lowfrom
,
928 size_int (GET_MODE_BITSIZE (lowpart_mode
) - 1),
930 fill_value
= convert_to_mode (word_mode
, fill_value
, 1);
934 /* Fill the remaining words. */
935 for (i
= GET_MODE_SIZE (lowpart_mode
) / UNITS_PER_WORD
; i
< nwords
; i
++)
937 int index
= (WORDS_BIG_ENDIAN
? nwords
- i
- 1 : i
);
938 rtx subword
= operand_subword (to
, index
, 1, to_mode
);
943 if (fill_value
!= subword
)
944 emit_move_insn (subword
, fill_value
);
947 insns
= get_insns ();
950 emit_no_conflict_block (insns
, to
, from
, NULL_RTX
,
951 gen_rtx_fmt_e (equiv_code
, to_mode
, copy_rtx (from
)));
955 /* Truncating multi-word to a word or less. */
956 if (GET_MODE_BITSIZE (from_mode
) > BITS_PER_WORD
957 && GET_MODE_BITSIZE (to_mode
) <= BITS_PER_WORD
)
959 if (!((GET_CODE (from
) == MEM
960 && ! MEM_VOLATILE_P (from
)
961 && direct_load
[(int) to_mode
]
962 && ! mode_dependent_address_p (XEXP (from
, 0)))
963 || GET_CODE (from
) == REG
964 || GET_CODE (from
) == SUBREG
))
965 from
= force_reg (from_mode
, from
);
966 convert_move (to
, gen_lowpart (word_mode
, from
), 0);
970 /* Handle pointer conversion. */ /* SPEE 900220. */
971 if (to_mode
== PQImode
)
973 if (from_mode
!= QImode
)
974 from
= convert_to_mode (QImode
, from
, unsignedp
);
976 #ifdef HAVE_truncqipqi2
977 if (HAVE_truncqipqi2
)
979 emit_unop_insn (CODE_FOR_truncqipqi2
, to
, from
, UNKNOWN
);
982 #endif /* HAVE_truncqipqi2 */
986 if (from_mode
== PQImode
)
988 if (to_mode
!= QImode
)
990 from
= convert_to_mode (QImode
, from
, unsignedp
);
995 #ifdef HAVE_extendpqiqi2
996 if (HAVE_extendpqiqi2
)
998 emit_unop_insn (CODE_FOR_extendpqiqi2
, to
, from
, UNKNOWN
);
1001 #endif /* HAVE_extendpqiqi2 */
1006 if (to_mode
== PSImode
)
1008 if (from_mode
!= SImode
)
1009 from
= convert_to_mode (SImode
, from
, unsignedp
);
1011 #ifdef HAVE_truncsipsi2
1012 if (HAVE_truncsipsi2
)
1014 emit_unop_insn (CODE_FOR_truncsipsi2
, to
, from
, UNKNOWN
);
1017 #endif /* HAVE_truncsipsi2 */
1021 if (from_mode
== PSImode
)
1023 if (to_mode
!= SImode
)
1025 from
= convert_to_mode (SImode
, from
, unsignedp
);
1030 #ifdef HAVE_extendpsisi2
1031 if (! unsignedp
&& HAVE_extendpsisi2
)
1033 emit_unop_insn (CODE_FOR_extendpsisi2
, to
, from
, UNKNOWN
);
1036 #endif /* HAVE_extendpsisi2 */
1037 #ifdef HAVE_zero_extendpsisi2
1038 if (unsignedp
&& HAVE_zero_extendpsisi2
)
1040 emit_unop_insn (CODE_FOR_zero_extendpsisi2
, to
, from
, UNKNOWN
);
1043 #endif /* HAVE_zero_extendpsisi2 */
1048 if (to_mode
== PDImode
)
1050 if (from_mode
!= DImode
)
1051 from
= convert_to_mode (DImode
, from
, unsignedp
);
1053 #ifdef HAVE_truncdipdi2
1054 if (HAVE_truncdipdi2
)
1056 emit_unop_insn (CODE_FOR_truncdipdi2
, to
, from
, UNKNOWN
);
1059 #endif /* HAVE_truncdipdi2 */
1063 if (from_mode
== PDImode
)
1065 if (to_mode
!= DImode
)
1067 from
= convert_to_mode (DImode
, from
, unsignedp
);
1072 #ifdef HAVE_extendpdidi2
1073 if (HAVE_extendpdidi2
)
1075 emit_unop_insn (CODE_FOR_extendpdidi2
, to
, from
, UNKNOWN
);
1078 #endif /* HAVE_extendpdidi2 */
1083 /* Now follow all the conversions between integers
1084 no more than a word long. */
1086 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1087 if (GET_MODE_BITSIZE (to_mode
) < GET_MODE_BITSIZE (from_mode
)
1088 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode
),
1089 GET_MODE_BITSIZE (from_mode
)))
1091 if (!((GET_CODE (from
) == MEM
1092 && ! MEM_VOLATILE_P (from
)
1093 && direct_load
[(int) to_mode
]
1094 && ! mode_dependent_address_p (XEXP (from
, 0)))
1095 || GET_CODE (from
) == REG
1096 || GET_CODE (from
) == SUBREG
))
1097 from
= force_reg (from_mode
, from
);
1098 if (GET_CODE (from
) == REG
&& REGNO (from
) < FIRST_PSEUDO_REGISTER
1099 && ! HARD_REGNO_MODE_OK (REGNO (from
), to_mode
))
1100 from
= copy_to_reg (from
);
1101 emit_move_insn (to
, gen_lowpart (to_mode
, from
));
1105 /* Handle extension. */
1106 if (GET_MODE_BITSIZE (to_mode
) > GET_MODE_BITSIZE (from_mode
))
1108 /* Convert directly if that works. */
1109 if ((code
= can_extend_p (to_mode
, from_mode
, unsignedp
))
1110 != CODE_FOR_nothing
)
1113 from
= force_not_mem (from
);
1115 emit_unop_insn (code
, to
, from
, equiv_code
);
1120 enum machine_mode intermediate
;
1124 /* Search for a mode to convert via. */
1125 for (intermediate
= from_mode
; intermediate
!= VOIDmode
;
1126 intermediate
= GET_MODE_WIDER_MODE (intermediate
))
1127 if (((can_extend_p (to_mode
, intermediate
, unsignedp
)
1128 != CODE_FOR_nothing
)
1129 || (GET_MODE_SIZE (to_mode
) < GET_MODE_SIZE (intermediate
)
1130 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode
),
1131 GET_MODE_BITSIZE (intermediate
))))
1132 && (can_extend_p (intermediate
, from_mode
, unsignedp
)
1133 != CODE_FOR_nothing
))
1135 convert_move (to
, convert_to_mode (intermediate
, from
,
1136 unsignedp
), unsignedp
);
1140 /* No suitable intermediate mode.
1141 Generate what we need with shifts. */
1142 shift_amount
= build_int_2 (GET_MODE_BITSIZE (to_mode
)
1143 - GET_MODE_BITSIZE (from_mode
), 0);
1144 from
= gen_lowpart (to_mode
, force_reg (from_mode
, from
));
1145 tmp
= expand_shift (LSHIFT_EXPR
, to_mode
, from
, shift_amount
,
1147 tmp
= expand_shift (RSHIFT_EXPR
, to_mode
, tmp
, shift_amount
,
1150 emit_move_insn (to
, tmp
);
1155 /* Support special truncate insns for certain modes. */
1157 if (from_mode
== DImode
&& to_mode
== SImode
)
1159 #ifdef HAVE_truncdisi2
1160 if (HAVE_truncdisi2
)
1162 emit_unop_insn (CODE_FOR_truncdisi2
, to
, from
, UNKNOWN
);
1166 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1170 if (from_mode
== DImode
&& to_mode
== HImode
)
1172 #ifdef HAVE_truncdihi2
1173 if (HAVE_truncdihi2
)
1175 emit_unop_insn (CODE_FOR_truncdihi2
, to
, from
, UNKNOWN
);
1179 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1183 if (from_mode
== DImode
&& to_mode
== QImode
)
1185 #ifdef HAVE_truncdiqi2
1186 if (HAVE_truncdiqi2
)
1188 emit_unop_insn (CODE_FOR_truncdiqi2
, to
, from
, UNKNOWN
);
1192 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1196 if (from_mode
== SImode
&& to_mode
== HImode
)
1198 #ifdef HAVE_truncsihi2
1199 if (HAVE_truncsihi2
)
1201 emit_unop_insn (CODE_FOR_truncsihi2
, to
, from
, UNKNOWN
);
1205 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1209 if (from_mode
== SImode
&& to_mode
== QImode
)
1211 #ifdef HAVE_truncsiqi2
1212 if (HAVE_truncsiqi2
)
1214 emit_unop_insn (CODE_FOR_truncsiqi2
, to
, from
, UNKNOWN
);
1218 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1222 if (from_mode
== HImode
&& to_mode
== QImode
)
1224 #ifdef HAVE_trunchiqi2
1225 if (HAVE_trunchiqi2
)
1227 emit_unop_insn (CODE_FOR_trunchiqi2
, to
, from
, UNKNOWN
);
1231 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1235 if (from_mode
== TImode
&& to_mode
== DImode
)
1237 #ifdef HAVE_trunctidi2
1238 if (HAVE_trunctidi2
)
1240 emit_unop_insn (CODE_FOR_trunctidi2
, to
, from
, UNKNOWN
);
1244 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1248 if (from_mode
== TImode
&& to_mode
== SImode
)
1250 #ifdef HAVE_trunctisi2
1251 if (HAVE_trunctisi2
)
1253 emit_unop_insn (CODE_FOR_trunctisi2
, to
, from
, UNKNOWN
);
1257 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1261 if (from_mode
== TImode
&& to_mode
== HImode
)
1263 #ifdef HAVE_trunctihi2
1264 if (HAVE_trunctihi2
)
1266 emit_unop_insn (CODE_FOR_trunctihi2
, to
, from
, UNKNOWN
);
1270 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1274 if (from_mode
== TImode
&& to_mode
== QImode
)
1276 #ifdef HAVE_trunctiqi2
1277 if (HAVE_trunctiqi2
)
1279 emit_unop_insn (CODE_FOR_trunctiqi2
, to
, from
, UNKNOWN
);
1283 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1287 /* Handle truncation of volatile memrefs, and so on;
1288 the things that couldn't be truncated directly,
1289 and for which there was no special instruction. */
1290 if (GET_MODE_BITSIZE (to_mode
) < GET_MODE_BITSIZE (from_mode
))
1292 rtx temp
= force_reg (to_mode
, gen_lowpart (to_mode
, from
));
1293 emit_move_insn (to
, temp
);
1297 /* Mode combination is not recognized. */
1301 /* Return an rtx for a value that would result
1302 from converting X to mode MODE.
1303 Both X and MODE may be floating, or both integer.
1304 UNSIGNEDP is nonzero if X is an unsigned value.
1305 This can be done by referring to a part of X in place
1306 or by copying to a new temporary with conversion.
1308 This function *must not* call protect_from_queue
1309 except when putting X into an insn (in which case convert_move does it). */
1312 convert_to_mode (enum machine_mode mode
, rtx x
, int unsignedp
)
1314 return convert_modes (mode
, VOIDmode
, x
, unsignedp
);
1317 /* Return an rtx for a value that would result
1318 from converting X from mode OLDMODE to mode MODE.
1319 Both modes may be floating, or both integer.
1320 UNSIGNEDP is nonzero if X is an unsigned value.
1322 This can be done by referring to a part of X in place
1323 or by copying to a new temporary with conversion.
1325 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1327 This function *must not* call protect_from_queue
1328 except when putting X into an insn (in which case convert_move does it). */
1331 convert_modes (enum machine_mode mode
, enum machine_mode oldmode
, rtx x
, int unsignedp
)
1335 /* If FROM is a SUBREG that indicates that we have already done at least
1336 the required extension, strip it. */
1338 if (GET_CODE (x
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (x
)
1339 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
))) >= GET_MODE_SIZE (mode
)
1340 && SUBREG_PROMOTED_UNSIGNED_P (x
) == unsignedp
)
1341 x
= gen_lowpart (mode
, x
);
1343 if (GET_MODE (x
) != VOIDmode
)
1344 oldmode
= GET_MODE (x
);
1346 if (mode
== oldmode
)
1349 /* There is one case that we must handle specially: If we are converting
1350 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1351 we are to interpret the constant as unsigned, gen_lowpart will do
1352 the wrong if the constant appears negative. What we want to do is
1353 make the high-order word of the constant zero, not all ones. */
1355 if (unsignedp
&& GET_MODE_CLASS (mode
) == MODE_INT
1356 && GET_MODE_BITSIZE (mode
) == 2 * HOST_BITS_PER_WIDE_INT
1357 && GET_CODE (x
) == CONST_INT
&& INTVAL (x
) < 0)
1359 HOST_WIDE_INT val
= INTVAL (x
);
1361 if (oldmode
!= VOIDmode
1362 && HOST_BITS_PER_WIDE_INT
> GET_MODE_BITSIZE (oldmode
))
1364 int width
= GET_MODE_BITSIZE (oldmode
);
1366 /* We need to zero extend VAL. */
1367 val
&= ((HOST_WIDE_INT
) 1 << width
) - 1;
1370 return immed_double_const (val
, (HOST_WIDE_INT
) 0, mode
);
1373 /* We can do this with a gen_lowpart if both desired and current modes
1374 are integer, and this is either a constant integer, a register, or a
1375 non-volatile MEM. Except for the constant case where MODE is no
1376 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1378 if ((GET_CODE (x
) == CONST_INT
1379 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)
1380 || (GET_MODE_CLASS (mode
) == MODE_INT
1381 && GET_MODE_CLASS (oldmode
) == MODE_INT
1382 && (GET_CODE (x
) == CONST_DOUBLE
1383 || (GET_MODE_SIZE (mode
) <= GET_MODE_SIZE (oldmode
)
1384 && ((GET_CODE (x
) == MEM
&& ! MEM_VOLATILE_P (x
)
1385 && direct_load
[(int) mode
])
1386 || (GET_CODE (x
) == REG
1387 && (! HARD_REGISTER_P (x
)
1388 || HARD_REGNO_MODE_OK (REGNO (x
), mode
))
1389 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode
),
1390 GET_MODE_BITSIZE (GET_MODE (x
)))))))))
1392 /* ?? If we don't know OLDMODE, we have to assume here that
1393 X does not need sign- or zero-extension. This may not be
1394 the case, but it's the best we can do. */
1395 if (GET_CODE (x
) == CONST_INT
&& oldmode
!= VOIDmode
1396 && GET_MODE_SIZE (mode
) > GET_MODE_SIZE (oldmode
))
1398 HOST_WIDE_INT val
= INTVAL (x
);
1399 int width
= GET_MODE_BITSIZE (oldmode
);
1401 /* We must sign or zero-extend in this case. Start by
1402 zero-extending, then sign extend if we need to. */
1403 val
&= ((HOST_WIDE_INT
) 1 << width
) - 1;
1405 && (val
& ((HOST_WIDE_INT
) 1 << (width
- 1))))
1406 val
|= (HOST_WIDE_INT
) (-1) << width
;
1408 return gen_int_mode (val
, mode
);
1411 return gen_lowpart (mode
, x
);
1414 /* Converting from integer constant into mode is always equivalent to an
1415 subreg operation. */
1416 if (VECTOR_MODE_P (mode
) && GET_MODE (x
) == VOIDmode
)
1418 if (GET_MODE_BITSIZE (mode
) != GET_MODE_BITSIZE (oldmode
))
1420 return simplify_gen_subreg (mode
, x
, oldmode
, 0);
1423 temp
= gen_reg_rtx (mode
);
1424 convert_move (temp
, x
, unsignedp
);
1428 /* STORE_MAX_PIECES is the number of bytes at a time that we can
1429 store efficiently. Due to internal GCC limitations, this is
1430 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
1431 for an immediate constant. */
1433 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
1435 /* Determine whether the LEN bytes can be moved by using several move
1436 instructions. Return nonzero if a call to move_by_pieces should
1440 can_move_by_pieces (unsigned HOST_WIDE_INT len
,
1441 unsigned int align ATTRIBUTE_UNUSED
)
1443 return MOVE_BY_PIECES_P (len
, align
);
1446 /* Generate several move instructions to copy LEN bytes from block FROM to
1447 block TO. (These are MEM rtx's with BLKmode). The caller must pass FROM
1448 and TO through protect_from_queue before calling.
1450 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1451 used to push FROM to the stack.
1453 ALIGN is maximum stack alignment we can assume.
1455 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
1456 mempcpy, and if ENDP is 2 return memory the end minus one byte ala
1460 move_by_pieces (rtx to
, rtx from
, unsigned HOST_WIDE_INT len
,
1461 unsigned int align
, int endp
)
1463 struct move_by_pieces data
;
1464 rtx to_addr
, from_addr
= XEXP (from
, 0);
1465 unsigned int max_size
= MOVE_MAX_PIECES
+ 1;
1466 enum machine_mode mode
= VOIDmode
, tmode
;
1467 enum insn_code icode
;
1469 align
= MIN (to
? MEM_ALIGN (to
) : align
, MEM_ALIGN (from
));
1472 data
.from_addr
= from_addr
;
1475 to_addr
= XEXP (to
, 0);
1478 = (GET_CODE (to_addr
) == PRE_INC
|| GET_CODE (to_addr
) == PRE_DEC
1479 || GET_CODE (to_addr
) == POST_INC
|| GET_CODE (to_addr
) == POST_DEC
);
1481 = (GET_CODE (to_addr
) == PRE_DEC
|| GET_CODE (to_addr
) == POST_DEC
);
1488 #ifdef STACK_GROWS_DOWNWARD
1494 data
.to_addr
= to_addr
;
1497 = (GET_CODE (from_addr
) == PRE_INC
|| GET_CODE (from_addr
) == PRE_DEC
1498 || GET_CODE (from_addr
) == POST_INC
1499 || GET_CODE (from_addr
) == POST_DEC
);
1501 data
.explicit_inc_from
= 0;
1502 data
.explicit_inc_to
= 0;
1503 if (data
.reverse
) data
.offset
= len
;
1506 /* If copying requires more than two move insns,
1507 copy addresses to registers (to make displacements shorter)
1508 and use post-increment if available. */
1509 if (!(data
.autinc_from
&& data
.autinc_to
)
1510 && move_by_pieces_ninsns (len
, align
) > 2)
1512 /* Find the mode of the largest move... */
1513 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
1514 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
1515 if (GET_MODE_SIZE (tmode
) < max_size
)
1518 if (USE_LOAD_PRE_DECREMENT (mode
) && data
.reverse
&& ! data
.autinc_from
)
1520 data
.from_addr
= copy_addr_to_reg (plus_constant (from_addr
, len
));
1521 data
.autinc_from
= 1;
1522 data
.explicit_inc_from
= -1;
1524 if (USE_LOAD_POST_INCREMENT (mode
) && ! data
.autinc_from
)
1526 data
.from_addr
= copy_addr_to_reg (from_addr
);
1527 data
.autinc_from
= 1;
1528 data
.explicit_inc_from
= 1;
1530 if (!data
.autinc_from
&& CONSTANT_P (from_addr
))
1531 data
.from_addr
= copy_addr_to_reg (from_addr
);
1532 if (USE_STORE_PRE_DECREMENT (mode
) && data
.reverse
&& ! data
.autinc_to
)
1534 data
.to_addr
= copy_addr_to_reg (plus_constant (to_addr
, len
));
1536 data
.explicit_inc_to
= -1;
1538 if (USE_STORE_POST_INCREMENT (mode
) && ! data
.reverse
&& ! data
.autinc_to
)
1540 data
.to_addr
= copy_addr_to_reg (to_addr
);
1542 data
.explicit_inc_to
= 1;
1544 if (!data
.autinc_to
&& CONSTANT_P (to_addr
))
1545 data
.to_addr
= copy_addr_to_reg (to_addr
);
1548 if (! SLOW_UNALIGNED_ACCESS (word_mode
, align
)
1549 || align
> MOVE_MAX
* BITS_PER_UNIT
|| align
>= BIGGEST_ALIGNMENT
)
1550 align
= MOVE_MAX
* BITS_PER_UNIT
;
1552 /* First move what we can in the largest integer mode, then go to
1553 successively smaller modes. */
1555 while (max_size
> 1)
1557 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
1558 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
1559 if (GET_MODE_SIZE (tmode
) < max_size
)
1562 if (mode
== VOIDmode
)
1565 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
1566 if (icode
!= CODE_FOR_nothing
&& align
>= GET_MODE_ALIGNMENT (mode
))
1567 move_by_pieces_1 (GEN_FCN (icode
), mode
, &data
);
1569 max_size
= GET_MODE_SIZE (mode
);
1572 /* The code above should have handled everything. */
1586 if (HAVE_POST_INCREMENT
&& data
.explicit_inc_to
> 0)
1587 emit_insn (gen_add2_insn (data
.to_addr
, constm1_rtx
));
1589 data
.to_addr
= copy_addr_to_reg (plus_constant (data
.to_addr
,
1592 to1
= adjust_automodify_address (data
.to
, QImode
, data
.to_addr
,
1599 to1
= adjust_address (data
.to
, QImode
, data
.offset
);
1607 /* Return number of insns required to move L bytes by pieces.
1608 ALIGN (in bits) is maximum alignment we can assume. */
1610 static unsigned HOST_WIDE_INT
1611 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l
, unsigned int align
)
1613 unsigned HOST_WIDE_INT n_insns
= 0;
1614 unsigned HOST_WIDE_INT max_size
= MOVE_MAX
+ 1;
1616 if (! SLOW_UNALIGNED_ACCESS (word_mode
, align
)
1617 || align
> MOVE_MAX
* BITS_PER_UNIT
|| align
>= BIGGEST_ALIGNMENT
)
1618 align
= MOVE_MAX
* BITS_PER_UNIT
;
1620 while (max_size
> 1)
1622 enum machine_mode mode
= VOIDmode
, tmode
;
1623 enum insn_code icode
;
1625 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
1626 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
1627 if (GET_MODE_SIZE (tmode
) < max_size
)
1630 if (mode
== VOIDmode
)
1633 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
1634 if (icode
!= CODE_FOR_nothing
&& align
>= GET_MODE_ALIGNMENT (mode
))
1635 n_insns
+= l
/ GET_MODE_SIZE (mode
), l
%= GET_MODE_SIZE (mode
);
1637 max_size
= GET_MODE_SIZE (mode
);
1645 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1646 with move instructions for mode MODE. GENFUN is the gen_... function
1647 to make a move insn for that mode. DATA has all the other info. */
1650 move_by_pieces_1 (rtx (*genfun
) (rtx
, ...), enum machine_mode mode
,
1651 struct move_by_pieces
*data
)
1653 unsigned int size
= GET_MODE_SIZE (mode
);
1654 rtx to1
= NULL_RTX
, from1
;
1656 while (data
->len
>= size
)
1659 data
->offset
-= size
;
1663 if (data
->autinc_to
)
1664 to1
= adjust_automodify_address (data
->to
, mode
, data
->to_addr
,
1667 to1
= adjust_address (data
->to
, mode
, data
->offset
);
1670 if (data
->autinc_from
)
1671 from1
= adjust_automodify_address (data
->from
, mode
, data
->from_addr
,
1674 from1
= adjust_address (data
->from
, mode
, data
->offset
);
1676 if (HAVE_PRE_DECREMENT
&& data
->explicit_inc_to
< 0)
1677 emit_insn (gen_add2_insn (data
->to_addr
,
1678 GEN_INT (-(HOST_WIDE_INT
)size
)));
1679 if (HAVE_PRE_DECREMENT
&& data
->explicit_inc_from
< 0)
1680 emit_insn (gen_add2_insn (data
->from_addr
,
1681 GEN_INT (-(HOST_WIDE_INT
)size
)));
1684 emit_insn ((*genfun
) (to1
, from1
));
1687 #ifdef PUSH_ROUNDING
1688 emit_single_push_insn (mode
, from1
, NULL
);
1694 if (HAVE_POST_INCREMENT
&& data
->explicit_inc_to
> 0)
1695 emit_insn (gen_add2_insn (data
->to_addr
, GEN_INT (size
)));
1696 if (HAVE_POST_INCREMENT
&& data
->explicit_inc_from
> 0)
1697 emit_insn (gen_add2_insn (data
->from_addr
, GEN_INT (size
)));
1699 if (! data
->reverse
)
1700 data
->offset
+= size
;
1706 /* Emit code to move a block Y to a block X. This may be done with
1707 string-move instructions, with multiple scalar move instructions,
1708 or with a library call.
1710 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1711 SIZE is an rtx that says how long they are.
1712 ALIGN is the maximum alignment we can assume they have.
1713 METHOD describes what kind of copy this is, and what mechanisms may be used.
1715 Return the address of the new block, if memcpy is called and returns it,
1719 emit_block_move (rtx x
, rtx y
, rtx size
, enum block_op_methods method
)
1727 case BLOCK_OP_NORMAL
:
1728 may_use_call
= true;
1731 case BLOCK_OP_CALL_PARM
:
1732 may_use_call
= block_move_libcall_safe_for_call_parm ();
1734 /* Make inhibit_defer_pop nonzero around the library call
1735 to force it to pop the arguments right away. */
1739 case BLOCK_OP_NO_LIBCALL
:
1740 may_use_call
= false;
1747 align
= MIN (MEM_ALIGN (x
), MEM_ALIGN (y
));
1749 if (GET_MODE (x
) != BLKmode
)
1751 if (GET_MODE (y
) != BLKmode
)
1754 x
= protect_from_queue (x
, 1);
1755 y
= protect_from_queue (y
, 0);
1756 size
= protect_from_queue (size
, 0);
1758 if (GET_CODE (x
) != MEM
)
1760 if (GET_CODE (y
) != MEM
)
1765 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1766 can be incorrect is coming from __builtin_memcpy. */
1767 if (GET_CODE (size
) == CONST_INT
)
1769 if (INTVAL (size
) == 0)
1772 x
= shallow_copy_rtx (x
);
1773 y
= shallow_copy_rtx (y
);
1774 set_mem_size (x
, size
);
1775 set_mem_size (y
, size
);
1778 if (GET_CODE (size
) == CONST_INT
&& MOVE_BY_PIECES_P (INTVAL (size
), align
))
1779 move_by_pieces (x
, y
, INTVAL (size
), align
, 0);
1780 else if (emit_block_move_via_movstr (x
, y
, size
, align
))
1782 else if (may_use_call
)
1783 retval
= emit_block_move_via_libcall (x
, y
, size
);
1785 emit_block_move_via_loop (x
, y
, size
, align
);
1787 if (method
== BLOCK_OP_CALL_PARM
)
1793 /* A subroutine of emit_block_move. Returns true if calling the
1794 block move libcall will not clobber any parameters which may have
1795 already been placed on the stack. */
1798 block_move_libcall_safe_for_call_parm (void)
1804 /* Check to see whether memcpy takes all register arguments. */
1806 takes_regs_uninit
, takes_regs_no
, takes_regs_yes
1807 } takes_regs
= takes_regs_uninit
;
1811 case takes_regs_uninit
:
1813 CUMULATIVE_ARGS args_so_far
;
1816 fn
= emit_block_move_libcall_fn (false);
1817 INIT_CUMULATIVE_ARGS (args_so_far
, TREE_TYPE (fn
), NULL_RTX
, 0);
1819 arg
= TYPE_ARG_TYPES (TREE_TYPE (fn
));
1820 for ( ; arg
!= void_list_node
; arg
= TREE_CHAIN (arg
))
1822 enum machine_mode mode
= TYPE_MODE (TREE_VALUE (arg
));
1823 rtx tmp
= FUNCTION_ARG (args_so_far
, mode
, NULL_TREE
, 1);
1824 if (!tmp
|| !REG_P (tmp
))
1825 goto fail_takes_regs
;
1826 #ifdef FUNCTION_ARG_PARTIAL_NREGS
1827 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far
, mode
,
1829 goto fail_takes_regs
;
1831 FUNCTION_ARG_ADVANCE (args_so_far
, mode
, NULL_TREE
, 1);
1834 takes_regs
= takes_regs_yes
;
1837 case takes_regs_yes
:
1841 takes_regs
= takes_regs_no
;
1852 /* A subroutine of emit_block_move. Expand a movstr pattern;
1853 return true if successful. */
1856 emit_block_move_via_movstr (rtx x
, rtx y
, rtx size
, unsigned int align
)
1858 rtx opalign
= GEN_INT (align
/ BITS_PER_UNIT
);
1859 enum machine_mode mode
;
1861 /* Since this is a move insn, we don't care about volatility. */
1864 /* Try the most limited insn first, because there's no point
1865 including more than one in the machine description unless
1866 the more limited one has some advantage. */
1868 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
); mode
!= VOIDmode
;
1869 mode
= GET_MODE_WIDER_MODE (mode
))
1871 enum insn_code code
= movstr_optab
[(int) mode
];
1872 insn_operand_predicate_fn pred
;
1874 if (code
!= CODE_FOR_nothing
1875 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1876 here because if SIZE is less than the mode mask, as it is
1877 returned by the macro, it will definitely be less than the
1878 actual mode mask. */
1879 && ((GET_CODE (size
) == CONST_INT
1880 && ((unsigned HOST_WIDE_INT
) INTVAL (size
)
1881 <= (GET_MODE_MASK (mode
) >> 1)))
1882 || GET_MODE_BITSIZE (mode
) >= BITS_PER_WORD
)
1883 && ((pred
= insn_data
[(int) code
].operand
[0].predicate
) == 0
1884 || (*pred
) (x
, BLKmode
))
1885 && ((pred
= insn_data
[(int) code
].operand
[1].predicate
) == 0
1886 || (*pred
) (y
, BLKmode
))
1887 && ((pred
= insn_data
[(int) code
].operand
[3].predicate
) == 0
1888 || (*pred
) (opalign
, VOIDmode
)))
1891 rtx last
= get_last_insn ();
1894 op2
= convert_to_mode (mode
, size
, 1);
1895 pred
= insn_data
[(int) code
].operand
[2].predicate
;
1896 if (pred
!= 0 && ! (*pred
) (op2
, mode
))
1897 op2
= copy_to_mode_reg (mode
, op2
);
1899 /* ??? When called via emit_block_move_for_call, it'd be
1900 nice if there were some way to inform the backend, so
1901 that it doesn't fail the expansion because it thinks
1902 emitting the libcall would be more efficient. */
1904 pat
= GEN_FCN ((int) code
) (x
, y
, op2
, opalign
);
1912 delete_insns_since (last
);
1920 /* A subroutine of emit_block_move. Expand a call to memcpy or bcopy.
1921 Return the return value from memcpy, 0 otherwise. */
1924 emit_block_move_via_libcall (rtx dst
, rtx src
, rtx size
)
1926 rtx dst_addr
, src_addr
;
1927 tree call_expr
, arg_list
, fn
, src_tree
, dst_tree
, size_tree
;
1928 enum machine_mode size_mode
;
1931 /* DST, SRC, or SIZE may have been passed through protect_from_queue.
1933 It is unsafe to save the value generated by protect_from_queue and reuse
1934 it later. Consider what happens if emit_queue is called before the
1935 return value from protect_from_queue is used.
1937 Expansion of the CALL_EXPR below will call emit_queue before we are
1938 finished emitting RTL for argument setup. So if we are not careful we
1939 could get the wrong value for an argument.
1941 To avoid this problem we go ahead and emit code to copy the addresses of
1942 DST and SRC and SIZE into new pseudos. We can then place those new
1943 pseudos into an RTL_EXPR and use them later, even after a call to
1946 Note this is not strictly needed for library calls since they do not call
1947 emit_queue before loading their arguments. However, we may need to have
1948 library calls call emit_queue in the future since failing to do so could
1949 cause problems for targets which define SMALL_REGISTER_CLASSES and pass
1950 arguments in registers. */
1952 dst_addr
= copy_to_mode_reg (Pmode
, XEXP (dst
, 0));
1953 src_addr
= copy_to_mode_reg (Pmode
, XEXP (src
, 0));
1955 #ifdef POINTERS_EXTEND_UNSIGNED
1956 dst_addr
= convert_memory_address (ptr_mode
, dst_addr
);
1957 src_addr
= convert_memory_address (ptr_mode
, src_addr
);
1960 dst_tree
= make_tree (ptr_type_node
, dst_addr
);
1961 src_tree
= make_tree (ptr_type_node
, src_addr
);
1963 if (TARGET_MEM_FUNCTIONS
)
1964 size_mode
= TYPE_MODE (sizetype
);
1966 size_mode
= TYPE_MODE (unsigned_type_node
);
1968 size
= convert_to_mode (size_mode
, size
, 1);
1969 size
= copy_to_mode_reg (size_mode
, size
);
1971 /* It is incorrect to use the libcall calling conventions to call
1972 memcpy in this context. This could be a user call to memcpy and
1973 the user may wish to examine the return value from memcpy. For
1974 targets where libcalls and normal calls have different conventions
1975 for returning pointers, we could end up generating incorrect code.
1977 For convenience, we generate the call to bcopy this way as well. */
1979 if (TARGET_MEM_FUNCTIONS
)
1980 size_tree
= make_tree (sizetype
, size
);
1982 size_tree
= make_tree (unsigned_type_node
, size
);
1984 fn
= emit_block_move_libcall_fn (true);
1985 arg_list
= tree_cons (NULL_TREE
, size_tree
, NULL_TREE
);
1986 if (TARGET_MEM_FUNCTIONS
)
1988 arg_list
= tree_cons (NULL_TREE
, src_tree
, arg_list
);
1989 arg_list
= tree_cons (NULL_TREE
, dst_tree
, arg_list
);
1993 arg_list
= tree_cons (NULL_TREE
, dst_tree
, arg_list
);
1994 arg_list
= tree_cons (NULL_TREE
, src_tree
, arg_list
);
1997 /* Now we have to build up the CALL_EXPR itself. */
1998 call_expr
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fn
)), fn
);
1999 call_expr
= build (CALL_EXPR
, TREE_TYPE (TREE_TYPE (fn
)),
2000 call_expr
, arg_list
, NULL_TREE
);
2002 retval
= expand_expr (call_expr
, NULL_RTX
, VOIDmode
, 0);
2004 /* If we are initializing a readonly value, show the above call clobbered
2005 it. Otherwise, a load from it may erroneously be hoisted from a loop, or
2006 the delay slot scheduler might overlook conflicts and take nasty
2008 if (RTX_UNCHANGING_P (dst
))
2009 add_function_usage_to
2010 (last_call_insn (), gen_rtx_EXPR_LIST (VOIDmode
,
2011 gen_rtx_CLOBBER (VOIDmode
, dst
),
2014 return TARGET_MEM_FUNCTIONS
? retval
: NULL_RTX
;
2017 /* A subroutine of emit_block_move_via_libcall. Create the tree node
2018 for the function we use for block copies. The first time FOR_CALL
2019 is true, we call assemble_external. */
2021 static GTY(()) tree block_move_fn
;
2024 init_block_move_fn (const char *asmspec
)
2030 if (TARGET_MEM_FUNCTIONS
)
2032 fn
= get_identifier ("memcpy");
2033 args
= build_function_type_list (ptr_type_node
, ptr_type_node
,
2034 const_ptr_type_node
, sizetype
,
2039 fn
= get_identifier ("bcopy");
2040 args
= build_function_type_list (void_type_node
, const_ptr_type_node
,
2041 ptr_type_node
, unsigned_type_node
,
2045 fn
= build_decl (FUNCTION_DECL
, fn
, args
);
2046 DECL_EXTERNAL (fn
) = 1;
2047 TREE_PUBLIC (fn
) = 1;
2048 DECL_ARTIFICIAL (fn
) = 1;
2049 TREE_NOTHROW (fn
) = 1;
2056 SET_DECL_RTL (block_move_fn
, NULL_RTX
);
2057 SET_DECL_ASSEMBLER_NAME (block_move_fn
, get_identifier (asmspec
));
2062 emit_block_move_libcall_fn (int for_call
)
2064 static bool emitted_extern
;
2067 init_block_move_fn (NULL
);
2069 if (for_call
&& !emitted_extern
)
2071 emitted_extern
= true;
2072 make_decl_rtl (block_move_fn
, NULL
);
2073 assemble_external (block_move_fn
);
2076 return block_move_fn
;
2079 /* A subroutine of emit_block_move. Copy the data via an explicit
2080 loop. This is used only when libcalls are forbidden. */
2081 /* ??? It'd be nice to copy in hunks larger than QImode. */
2084 emit_block_move_via_loop (rtx x
, rtx y
, rtx size
,
2085 unsigned int align ATTRIBUTE_UNUSED
)
2087 rtx cmp_label
, top_label
, iter
, x_addr
, y_addr
, tmp
;
2088 enum machine_mode iter_mode
;
2090 iter_mode
= GET_MODE (size
);
2091 if (iter_mode
== VOIDmode
)
2092 iter_mode
= word_mode
;
2094 top_label
= gen_label_rtx ();
2095 cmp_label
= gen_label_rtx ();
2096 iter
= gen_reg_rtx (iter_mode
);
2098 emit_move_insn (iter
, const0_rtx
);
2100 x_addr
= force_operand (XEXP (x
, 0), NULL_RTX
);
2101 y_addr
= force_operand (XEXP (y
, 0), NULL_RTX
);
2102 do_pending_stack_adjust ();
2104 emit_note (NOTE_INSN_LOOP_BEG
);
2106 emit_jump (cmp_label
);
2107 emit_label (top_label
);
2109 tmp
= convert_modes (Pmode
, iter_mode
, iter
, true);
2110 x_addr
= gen_rtx_PLUS (Pmode
, x_addr
, tmp
);
2111 y_addr
= gen_rtx_PLUS (Pmode
, y_addr
, tmp
);
2112 x
= change_address (x
, QImode
, x_addr
);
2113 y
= change_address (y
, QImode
, y_addr
);
2115 emit_move_insn (x
, y
);
2117 tmp
= expand_simple_binop (iter_mode
, PLUS
, iter
, const1_rtx
, iter
,
2118 true, OPTAB_LIB_WIDEN
);
2120 emit_move_insn (iter
, tmp
);
2122 emit_note (NOTE_INSN_LOOP_CONT
);
2123 emit_label (cmp_label
);
2125 emit_cmp_and_jump_insns (iter
, size
, LT
, NULL_RTX
, iter_mode
,
2128 emit_note (NOTE_INSN_LOOP_END
);
2131 /* Copy all or part of a value X into registers starting at REGNO.
2132 The number of registers to be filled is NREGS. */
2135 move_block_to_reg (int regno
, rtx x
, int nregs
, enum machine_mode mode
)
2138 #ifdef HAVE_load_multiple
2146 if (CONSTANT_P (x
) && ! LEGITIMATE_CONSTANT_P (x
))
2147 x
= validize_mem (force_const_mem (mode
, x
));
2149 /* See if the machine can do this with a load multiple insn. */
2150 #ifdef HAVE_load_multiple
2151 if (HAVE_load_multiple
)
2153 last
= get_last_insn ();
2154 pat
= gen_load_multiple (gen_rtx_REG (word_mode
, regno
), x
,
2162 delete_insns_since (last
);
2166 for (i
= 0; i
< nregs
; i
++)
2167 emit_move_insn (gen_rtx_REG (word_mode
, regno
+ i
),
2168 operand_subword_force (x
, i
, mode
));
2171 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
2172 The number of registers to be filled is NREGS. */
2175 move_block_from_reg (int regno
, rtx x
, int nregs
)
2182 /* See if the machine can do this with a store multiple insn. */
2183 #ifdef HAVE_store_multiple
2184 if (HAVE_store_multiple
)
2186 rtx last
= get_last_insn ();
2187 rtx pat
= gen_store_multiple (x
, gen_rtx_REG (word_mode
, regno
),
2195 delete_insns_since (last
);
2199 for (i
= 0; i
< nregs
; i
++)
2201 rtx tem
= operand_subword (x
, i
, 1, BLKmode
);
2206 emit_move_insn (tem
, gen_rtx_REG (word_mode
, regno
+ i
));
2210 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
2211 ORIG, where ORIG is a non-consecutive group of registers represented by
2212 a PARALLEL. The clone is identical to the original except in that the
2213 original set of registers is replaced by a new set of pseudo registers.
2214 The new set has the same modes as the original set. */
2217 gen_group_rtx (rtx orig
)
2222 if (GET_CODE (orig
) != PARALLEL
)
2225 length
= XVECLEN (orig
, 0);
2226 tmps
= alloca (sizeof (rtx
) * length
);
2228 /* Skip a NULL entry in first slot. */
2229 i
= XEXP (XVECEXP (orig
, 0, 0), 0) ? 0 : 1;
2234 for (; i
< length
; i
++)
2236 enum machine_mode mode
= GET_MODE (XEXP (XVECEXP (orig
, 0, i
), 0));
2237 rtx offset
= XEXP (XVECEXP (orig
, 0, i
), 1);
2239 tmps
[i
] = gen_rtx_EXPR_LIST (VOIDmode
, gen_reg_rtx (mode
), offset
);
2242 return gen_rtx_PARALLEL (GET_MODE (orig
), gen_rtvec_v (length
, tmps
));
2245 /* Emit code to move a block ORIG_SRC of type TYPE to a block DST,
2246 where DST is non-consecutive registers represented by a PARALLEL.
2247 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
2251 emit_group_load (rtx dst
, rtx orig_src
, tree type ATTRIBUTE_UNUSED
, int ssize
)
2256 if (GET_CODE (dst
) != PARALLEL
)
2259 /* Check for a NULL entry, used to indicate that the parameter goes
2260 both on the stack and in registers. */
2261 if (XEXP (XVECEXP (dst
, 0, 0), 0))
2266 tmps
= alloca (sizeof (rtx
) * XVECLEN (dst
, 0));
2268 /* Process the pieces. */
2269 for (i
= start
; i
< XVECLEN (dst
, 0); i
++)
2271 enum machine_mode mode
= GET_MODE (XEXP (XVECEXP (dst
, 0, i
), 0));
2272 HOST_WIDE_INT bytepos
= INTVAL (XEXP (XVECEXP (dst
, 0, i
), 1));
2273 unsigned int bytelen
= GET_MODE_SIZE (mode
);
2276 /* Handle trailing fragments that run over the size of the struct. */
2277 if (ssize
>= 0 && bytepos
+ (HOST_WIDE_INT
) bytelen
> ssize
)
2279 /* Arrange to shift the fragment to where it belongs.
2280 extract_bit_field loads to the lsb of the reg. */
2282 #ifdef BLOCK_REG_PADDING
2283 BLOCK_REG_PADDING (GET_MODE (orig_src
), type
, i
== start
)
2284 == (BYTES_BIG_ENDIAN
? upward
: downward
)
2289 shift
= (bytelen
- (ssize
- bytepos
)) * BITS_PER_UNIT
;
2290 bytelen
= ssize
- bytepos
;
2295 /* If we won't be loading directly from memory, protect the real source
2296 from strange tricks we might play; but make sure that the source can
2297 be loaded directly into the destination. */
2299 if (GET_CODE (orig_src
) != MEM
2300 && (!CONSTANT_P (orig_src
)
2301 || (GET_MODE (orig_src
) != mode
2302 && GET_MODE (orig_src
) != VOIDmode
)))
2304 if (GET_MODE (orig_src
) == VOIDmode
)
2305 src
= gen_reg_rtx (mode
);
2307 src
= gen_reg_rtx (GET_MODE (orig_src
));
2309 emit_move_insn (src
, orig_src
);
2312 /* Optimize the access just a bit. */
2313 if (GET_CODE (src
) == MEM
2314 && (! SLOW_UNALIGNED_ACCESS (mode
, MEM_ALIGN (src
))
2315 || MEM_ALIGN (src
) >= GET_MODE_ALIGNMENT (mode
))
2316 && bytepos
* BITS_PER_UNIT
% GET_MODE_ALIGNMENT (mode
) == 0
2317 && bytelen
== GET_MODE_SIZE (mode
))
2319 tmps
[i
] = gen_reg_rtx (mode
);
2320 emit_move_insn (tmps
[i
], adjust_address (src
, mode
, bytepos
));
2322 else if (GET_CODE (src
) == CONCAT
)
2324 unsigned int slen
= GET_MODE_SIZE (GET_MODE (src
));
2325 unsigned int slen0
= GET_MODE_SIZE (GET_MODE (XEXP (src
, 0)));
2327 if ((bytepos
== 0 && bytelen
== slen0
)
2328 || (bytepos
!= 0 && bytepos
+ bytelen
<= slen
))
2330 /* The following assumes that the concatenated objects all
2331 have the same size. In this case, a simple calculation
2332 can be used to determine the object and the bit field
2334 tmps
[i
] = XEXP (src
, bytepos
/ slen0
);
2335 if (! CONSTANT_P (tmps
[i
])
2336 && (GET_CODE (tmps
[i
]) != REG
|| GET_MODE (tmps
[i
]) != mode
))
2337 tmps
[i
] = extract_bit_field (tmps
[i
], bytelen
* BITS_PER_UNIT
,
2338 (bytepos
% slen0
) * BITS_PER_UNIT
,
2339 1, NULL_RTX
, mode
, mode
, ssize
);
2341 else if (bytepos
== 0)
2343 rtx mem
= assign_stack_temp (GET_MODE (src
), slen
, 0);
2344 emit_move_insn (mem
, src
);
2345 tmps
[i
] = adjust_address (mem
, mode
, 0);
2350 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
2351 SIMD register, which is currently broken. While we get GCC
2352 to emit proper RTL for these cases, let's dump to memory. */
2353 else if (VECTOR_MODE_P (GET_MODE (dst
))
2354 && GET_CODE (src
) == REG
)
2356 int slen
= GET_MODE_SIZE (GET_MODE (src
));
2359 mem
= assign_stack_temp (GET_MODE (src
), slen
, 0);
2360 emit_move_insn (mem
, src
);
2361 tmps
[i
] = adjust_address (mem
, mode
, (int) bytepos
);
2363 else if (CONSTANT_P (src
)
2364 || (GET_CODE (src
) == REG
&& GET_MODE (src
) == mode
))
2367 tmps
[i
] = extract_bit_field (src
, bytelen
* BITS_PER_UNIT
,
2368 bytepos
* BITS_PER_UNIT
, 1, NULL_RTX
,
2372 expand_binop (mode
, ashl_optab
, tmps
[i
], GEN_INT (shift
),
2373 tmps
[i
], 0, OPTAB_WIDEN
);
2378 /* Copy the extracted pieces into the proper (probable) hard regs. */
2379 for (i
= start
; i
< XVECLEN (dst
, 0); i
++)
2380 emit_move_insn (XEXP (XVECEXP (dst
, 0, i
), 0), tmps
[i
]);
/* Emit code to move a block SRC to block DST, where SRC and DST are
   non-consecutive groups of registers, each represented by a PARALLEL.  */

void
emit_group_move (rtx dst, rtx src)
{
  int i;

  if (GET_CODE (src) != PARALLEL
      || GET_CODE (dst) != PARALLEL
      || XVECLEN (src, 0) != XVECLEN (dst, 0))
    abort ();

  /* Skip first entry if NULL.  */
  for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
		    XEXP (XVECEXP (src, 0, i), 0));
}
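/* Usage sketch (hypothetical caller, not code from this file): given a
   PARALLEL group RET describing a value spread over several hard registers,
   gen_group_rtx and emit_group_move can be combined to move it into fresh
   pseudos that survive later clobbers of the hard registers:

     rtx tmp = gen_group_rtx (ret);   -- same layout, pseudo registers
     emit_group_move (tmp, ret);      -- element-wise register copies

   RET is assumed to already be a well-formed PARALLEL of (reg, offset)
   pairs; the names are invented for the example.  */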
2402 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
2403 where SRC is non-consecutive registers represented by a PARALLEL.
2404 SSIZE represents the total size of block ORIG_DST, or -1 if not
2408 emit_group_store (rtx orig_dst
, rtx src
, tree type ATTRIBUTE_UNUSED
, int ssize
)
2413 if (GET_CODE (src
) != PARALLEL
)
2416 /* Check for a NULL entry, used to indicate that the parameter goes
2417 both on the stack and in registers. */
2418 if (XEXP (XVECEXP (src
, 0, 0), 0))
2423 tmps
= alloca (sizeof (rtx
) * XVECLEN (src
, 0));
2425 /* Copy the (probable) hard regs into pseudos. */
2426 for (i
= start
; i
< XVECLEN (src
, 0); i
++)
2428 rtx reg
= XEXP (XVECEXP (src
, 0, i
), 0);
2429 tmps
[i
] = gen_reg_rtx (GET_MODE (reg
));
2430 emit_move_insn (tmps
[i
], reg
);
2434 /* If we won't be storing directly into memory, protect the real destination
2435 from strange tricks we might play. */
2437 if (GET_CODE (dst
) == PARALLEL
)
2441 /* We can get a PARALLEL dst if there is a conditional expression in
2442 a return statement. In that case, the dst and src are the same,
2443 so no action is necessary. */
2444 if (rtx_equal_p (dst
, src
))
2447 /* It is unclear if we can ever reach here, but we may as well handle
2448 it. Allocate a temporary, and split this into a store/load to/from
2451 temp
= assign_stack_temp (GET_MODE (dst
), ssize
, 0);
2452 emit_group_store (temp
, src
, type
, ssize
);
2453 emit_group_load (dst
, temp
, type
, ssize
);
2456 else if (GET_CODE (dst
) != MEM
&& GET_CODE (dst
) != CONCAT
)
2458 dst
= gen_reg_rtx (GET_MODE (orig_dst
));
2459 /* Make life a bit easier for combine. */
2460 emit_move_insn (dst
, CONST0_RTX (GET_MODE (orig_dst
)));
2463 /* Process the pieces. */
2464 for (i
= start
; i
< XVECLEN (src
, 0); i
++)
2466 HOST_WIDE_INT bytepos
= INTVAL (XEXP (XVECEXP (src
, 0, i
), 1));
2467 enum machine_mode mode
= GET_MODE (tmps
[i
]);
2468 unsigned int bytelen
= GET_MODE_SIZE (mode
);
2471 /* Handle trailing fragments that run over the size of the struct. */
2472 if (ssize
>= 0 && bytepos
+ (HOST_WIDE_INT
) bytelen
> ssize
)
2474 /* store_bit_field always takes its value from the lsb.
2475 Move the fragment to the lsb if it's not already there. */
2477 #ifdef BLOCK_REG_PADDING
2478 BLOCK_REG_PADDING (GET_MODE (orig_dst
), type
, i
== start
)
2479 == (BYTES_BIG_ENDIAN
? upward
: downward
)
2485 int shift
= (bytelen
- (ssize
- bytepos
)) * BITS_PER_UNIT
;
2486 expand_binop (mode
, ashr_optab
, tmps
[i
], GEN_INT (shift
),
2487 tmps
[i
], 0, OPTAB_WIDEN
);
2489 bytelen
= ssize
- bytepos
;
2492 if (GET_CODE (dst
) == CONCAT
)
2494 if (bytepos
+ bytelen
<= GET_MODE_SIZE (GET_MODE (XEXP (dst
, 0))))
2495 dest
= XEXP (dst
, 0);
2496 else if (bytepos
>= GET_MODE_SIZE (GET_MODE (XEXP (dst
, 0))))
2498 bytepos
-= GET_MODE_SIZE (GET_MODE (XEXP (dst
, 0)));
2499 dest
= XEXP (dst
, 1);
2501 else if (bytepos
== 0 && XVECLEN (src
, 0))
2503 dest
= assign_stack_temp (GET_MODE (dest
),
2504 GET_MODE_SIZE (GET_MODE (dest
)), 0);
2505 emit_move_insn (adjust_address (dest
, GET_MODE (tmps
[i
]), bytepos
),
2514 /* Optimize the access just a bit. */
2515 if (GET_CODE (dest
) == MEM
2516 && (! SLOW_UNALIGNED_ACCESS (mode
, MEM_ALIGN (dest
))
2517 || MEM_ALIGN (dest
) >= GET_MODE_ALIGNMENT (mode
))
2518 && bytepos
* BITS_PER_UNIT
% GET_MODE_ALIGNMENT (mode
) == 0
2519 && bytelen
== GET_MODE_SIZE (mode
))
2520 emit_move_insn (adjust_address (dest
, mode
, bytepos
), tmps
[i
]);
2522 store_bit_field (dest
, bytelen
* BITS_PER_UNIT
, bytepos
* BITS_PER_UNIT
,
2523 mode
, tmps
[i
], ssize
);
2528 /* Copy from the pseudo into the (probable) hard reg. */
2529 if (orig_dst
!= dst
)
2530 emit_move_insn (orig_dst
, dst
);
2533 /* Generate code to copy a BLKmode object of TYPE out of a
2534 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2535 is null, a stack temporary is created. TGTBLK is returned.
2537 The primary purpose of this routine is to handle functions
2538 that return BLKmode structures in registers. Some machines
2539 (the PA for example) want to return all small structures
2540 in registers regardless of the structure's alignment. */
2543 copy_blkmode_from_reg (rtx tgtblk
, rtx srcreg
, tree type
)
2545 unsigned HOST_WIDE_INT bytes
= int_size_in_bytes (type
);
2546 rtx src
= NULL
, dst
= NULL
;
2547 unsigned HOST_WIDE_INT bitsize
= MIN (TYPE_ALIGN (type
), BITS_PER_WORD
);
2548 unsigned HOST_WIDE_INT bitpos
, xbitpos
, big_endian_correction
= 0;
2552 tgtblk
= assign_temp (build_qualified_type (type
,
2554 | TYPE_QUAL_CONST
)),
2556 preserve_temp_slots (tgtblk
);
2559 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2560 into a new pseudo which is a full word. */
2562 if (GET_MODE (srcreg
) != BLKmode
2563 && GET_MODE_SIZE (GET_MODE (srcreg
)) < UNITS_PER_WORD
)
2564 srcreg
= convert_to_mode (word_mode
, srcreg
, TREE_UNSIGNED (type
));
2566 /* Structures whose size is not a multiple of a word are aligned
2567 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2568 machine, this means we must skip the empty high order bytes when
2569 calculating the bit offset. */
2570 if (BYTES_BIG_ENDIAN
2571 && bytes
% UNITS_PER_WORD
)
2572 big_endian_correction
2573 = (BITS_PER_WORD
- ((bytes
% UNITS_PER_WORD
) * BITS_PER_UNIT
));
2575 /* Copy the structure BITSIZE bites at a time.
2577 We could probably emit more efficient code for machines which do not use
2578 strict alignment, but it doesn't seem worth the effort at the current
2580 for (bitpos
= 0, xbitpos
= big_endian_correction
;
2581 bitpos
< bytes
* BITS_PER_UNIT
;
2582 bitpos
+= bitsize
, xbitpos
+= bitsize
)
2584 /* We need a new source operand each time xbitpos is on a
2585 word boundary and when xbitpos == big_endian_correction
2586 (the first time through). */
2587 if (xbitpos
% BITS_PER_WORD
== 0
2588 || xbitpos
== big_endian_correction
)
2589 src
= operand_subword_force (srcreg
, xbitpos
/ BITS_PER_WORD
,
2592 /* We need a new destination operand each time bitpos is on
2594 if (bitpos
% BITS_PER_WORD
== 0)
2595 dst
= operand_subword (tgtblk
, bitpos
/ BITS_PER_WORD
, 1, BLKmode
);
2597 /* Use xbitpos for the source extraction (right justified) and
2598 xbitpos for the destination store (left justified). */
2599 store_bit_field (dst
, bitsize
, bitpos
% BITS_PER_WORD
, word_mode
,
2600 extract_bit_field (src
, bitsize
,
2601 xbitpos
% BITS_PER_WORD
, 1,
2602 NULL_RTX
, word_mode
, word_mode
,
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg (rtx *call_fusage, rtx reg)
{
  if (GET_CODE (reg) != REG
      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    abort ();

  *call_fusage
    = gen_rtx_EXPR_LIST (VOIDmode,
			 gen_rtx_USE (VOIDmode, reg), *call_fusage);
}

/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (rtx *call_fusage, int regno, int nregs)
{
  int i;

  if (regno + nregs > FIRST_PSEUDO_REGISTER)
    abort ();

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, regno_reg_rtx[regno + i]);
}

/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (rtx *call_fusage, rtx regs)
{
  int i;

  for (i = 0; i < XVECLEN (regs, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
	 registers.  This can also be a MEM for targets that pass values
	 partially on the stack and partially in registers.  */
      if (reg != 0 && GET_CODE (reg) == REG)
	use_reg (call_fusage, reg);
    }
}
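/* Usage sketch (hypothetical register numbers): accumulating the USE list
   that a caller later attaches to a call insn as CALL_INSN_FUNCTION_USAGE.

     rtx call_fusage = 0;
     use_reg (&call_fusage, gen_rtx_REG (Pmode, 12));   -- one hard register
     use_regs (&call_fusage, 4, 3);                     -- hard regs 4, 5, 6

   Register 12 and the range 4..6 are made-up values; on a real target they
   would be the argument registers chosen by the ABI.  */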
2662 /* Determine whether the LEN bytes generated by CONSTFUN can be
2663 stored to memory using several move instructions. CONSTFUNDATA is
2664 a pointer which will be passed as argument in every CONSTFUN call.
2665 ALIGN is maximum alignment we can assume. Return nonzero if a
2666 call to store_by_pieces should succeed. */
2669 can_store_by_pieces (unsigned HOST_WIDE_INT len
,
2670 rtx (*constfun
) (void *, HOST_WIDE_INT
, enum machine_mode
),
2671 void *constfundata
, unsigned int align
)
2673 unsigned HOST_WIDE_INT max_size
, l
;
2674 HOST_WIDE_INT offset
= 0;
2675 enum machine_mode mode
, tmode
;
2676 enum insn_code icode
;
2683 if (! STORE_BY_PIECES_P (len
, align
))
2686 if (! SLOW_UNALIGNED_ACCESS (word_mode
, align
)
2687 || align
> MOVE_MAX
* BITS_PER_UNIT
|| align
>= BIGGEST_ALIGNMENT
)
2688 align
= MOVE_MAX
* BITS_PER_UNIT
;
2690 /* We would first store what we can in the largest integer mode, then go to
2691 successively smaller modes. */
2694 reverse
<= (HAVE_PRE_DECREMENT
|| HAVE_POST_DECREMENT
);
2699 max_size
= STORE_MAX_PIECES
+ 1;
2700 while (max_size
> 1)
2702 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
2703 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
2704 if (GET_MODE_SIZE (tmode
) < max_size
)
2707 if (mode
== VOIDmode
)
2710 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
2711 if (icode
!= CODE_FOR_nothing
2712 && align
>= GET_MODE_ALIGNMENT (mode
))
2714 unsigned int size
= GET_MODE_SIZE (mode
);
2721 cst
= (*constfun
) (constfundata
, offset
, mode
);
2722 if (!LEGITIMATE_CONSTANT_P (cst
))
2732 max_size
= GET_MODE_SIZE (mode
);
2735 /* The code above should have handled everything. */
2743 /* Generate several move instructions to store LEN bytes generated by
2744 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2745 pointer which will be passed as argument in every CONSTFUN call.
2746 ALIGN is maximum alignment we can assume.
2747 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
2748 mempcpy, and if ENDP is 2 return memory the end minus one byte ala
2752 store_by_pieces (rtx to
, unsigned HOST_WIDE_INT len
,
2753 rtx (*constfun
) (void *, HOST_WIDE_INT
, enum machine_mode
),
2754 void *constfundata
, unsigned int align
, int endp
)
2756 struct store_by_pieces data
;
2765 if (! STORE_BY_PIECES_P (len
, align
))
2767 to
= protect_from_queue (to
, 1);
2768 data
.constfun
= constfun
;
2769 data
.constfundata
= constfundata
;
2772 store_by_pieces_1 (&data
, align
);
2783 if (HAVE_POST_INCREMENT
&& data
.explicit_inc_to
> 0)
2784 emit_insn (gen_add2_insn (data
.to_addr
, constm1_rtx
));
2786 data
.to_addr
= copy_addr_to_reg (plus_constant (data
.to_addr
,
2789 to1
= adjust_automodify_address (data
.to
, QImode
, data
.to_addr
,
2796 to1
= adjust_address (data
.to
, QImode
, data
.offset
);
/* Generate several move instructions to clear LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  The caller must pass TO through protect_from_queue
   before calling.  ALIGN is maximum alignment we can assume.  */

static void
clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
{
  struct store_by_pieces data;

  if (len == 0)
    return;

  data.constfun = clear_by_pieces_1;
  data.constfundata = NULL;
  data.len = len;
  data.to = to;
  store_by_pieces_1 (&data, align);
}

/* Callback routine for clear_by_pieces.
   Return const0_rtx unconditionally.  */

static rtx
clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
		   HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
		   enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return const0_rtx;
}
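/* Illustrative sketch of the constfun protocol used by can_store_by_pieces
   and store_by_pieces (the helper below is hypothetical, not part of this
   file): the callback is handed the offset and mode of each piece and must
   return a constant of that mode, exactly as clear_by_pieces_1 above
   returns const0_rtx.  A callback that fills memory with 0xff bytes could
   look like

     static rtx
     all_ones_piece (void *data ATTRIBUTE_UNUSED,
		     HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
		     enum machine_mode mode ATTRIBUTE_UNUSED)
     {
       return constm1_rtx;
     }

   and a caller would guard the expansion with

     if (can_store_by_pieces (len, all_ones_piece, NULL, align))
       store_by_pieces (to, len, all_ones_piece, NULL, align, 0);

   where TO, LEN and ALIGN are whatever the caller has in hand.  */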
2834 /* Subroutine of clear_by_pieces and store_by_pieces.
2835 Generate several move instructions to store LEN bytes of block TO. (A MEM
2836 rtx with BLKmode). The caller must pass TO through protect_from_queue
2837 before calling. ALIGN is maximum alignment we can assume. */
2840 store_by_pieces_1 (struct store_by_pieces
*data ATTRIBUTE_UNUSED
,
2841 unsigned int align ATTRIBUTE_UNUSED
)
2843 rtx to_addr
= XEXP (data
->to
, 0);
2844 unsigned HOST_WIDE_INT max_size
= STORE_MAX_PIECES
+ 1;
2845 enum machine_mode mode
= VOIDmode
, tmode
;
2846 enum insn_code icode
;
2849 data
->to_addr
= to_addr
;
2851 = (GET_CODE (to_addr
) == PRE_INC
|| GET_CODE (to_addr
) == PRE_DEC
2852 || GET_CODE (to_addr
) == POST_INC
|| GET_CODE (to_addr
) == POST_DEC
);
2854 data
->explicit_inc_to
= 0;
2856 = (GET_CODE (to_addr
) == PRE_DEC
|| GET_CODE (to_addr
) == POST_DEC
);
2858 data
->offset
= data
->len
;
2860 /* If storing requires more than two move insns,
2861 copy addresses to registers (to make displacements shorter)
2862 and use post-increment if available. */
2863 if (!data
->autinc_to
2864 && move_by_pieces_ninsns (data
->len
, align
) > 2)
2866 /* Determine the main mode we'll be using. */
2867 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
2868 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
2869 if (GET_MODE_SIZE (tmode
) < max_size
)
2872 if (USE_STORE_PRE_DECREMENT (mode
) && data
->reverse
&& ! data
->autinc_to
)
2874 data
->to_addr
= copy_addr_to_reg (plus_constant (to_addr
, data
->len
));
2875 data
->autinc_to
= 1;
2876 data
->explicit_inc_to
= -1;
2879 if (USE_STORE_POST_INCREMENT (mode
) && ! data
->reverse
2880 && ! data
->autinc_to
)
2882 data
->to_addr
= copy_addr_to_reg (to_addr
);
2883 data
->autinc_to
= 1;
2884 data
->explicit_inc_to
= 1;
2887 if ( !data
->autinc_to
&& CONSTANT_P (to_addr
))
2888 data
->to_addr
= copy_addr_to_reg (to_addr
);
2891 if (! SLOW_UNALIGNED_ACCESS (word_mode
, align
)
2892 || align
> MOVE_MAX
* BITS_PER_UNIT
|| align
>= BIGGEST_ALIGNMENT
)
2893 align
= MOVE_MAX
* BITS_PER_UNIT
;
2895 /* First store what we can in the largest integer mode, then go to
2896 successively smaller modes. */
2898 while (max_size
> 1)
2900 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
2901 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
2902 if (GET_MODE_SIZE (tmode
) < max_size
)
2905 if (mode
== VOIDmode
)
2908 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
2909 if (icode
!= CODE_FOR_nothing
&& align
>= GET_MODE_ALIGNMENT (mode
))
2910 store_by_pieces_2 (GEN_FCN (icode
), mode
, data
);
2912 max_size
= GET_MODE_SIZE (mode
);
2915 /* The code above should have handled everything. */
/* Subroutine of store_by_pieces_1.  Store as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
		   struct store_by_pieces *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1, cst;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->autinc_to)
	to1 = adjust_automodify_address (data->to, mode, data->to_addr,
					 data->offset);
      else
	to1 = adjust_address (data->to, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  GEN_INT (-(HOST_WIDE_INT) size)));

      cst = (*data->constfun) (data->constfundata, data->offset, mode);
      emit_insn ((*genfun) (to1, cst));

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}
/* Write zeros through the storage of OBJECT.  If OBJECT has BLKmode, SIZE is
   its length in bytes.  */

rtx
clear_storage (rtx object, rtx size)
{
  rtx retval = 0;
  unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
			: GET_MODE_ALIGNMENT (GET_MODE (object)));

  /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
     just move a zero.  Otherwise, do this a piece at a time.  */
  if (GET_MODE (object) != BLKmode
      && GET_CODE (size) == CONST_INT
      && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
    emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
  else
    {
      object = protect_from_queue (object, 1);
      size = protect_from_queue (size, 0);

      if (size == const0_rtx)
	;
      else if (GET_CODE (size) == CONST_INT
	       && CLEAR_BY_PIECES_P (INTVAL (size), align))
	clear_by_pieces (object, INTVAL (size), align);
      else if (clear_storage_via_clrstr (object, size, align))
	;
      else
	retval = clear_storage_via_libcall (object, size);
    }

  return retval;
}
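/* Usage sketch (hypothetical caller, invented size): zeroing a 64-byte
   BLKmode stack temporary.

     rtx mem = assign_stack_temp (BLKmode, 64, 0);
     clear_storage (mem, GEN_INT (64));

   Depending on the size, the alignment and the target, the call expands to
   a plain move of zero, a clear-by-pieces sequence, a clrstr insn, or a
   library call to memset/bzero, as selected above.  */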
2994 /* A subroutine of clear_storage. Expand a clrstr pattern;
2995 return true if successful. */
2998 clear_storage_via_clrstr (rtx object
, rtx size
, unsigned int align
)
3000 /* Try the most limited insn first, because there's no point
3001 including more than one in the machine description unless
3002 the more limited one has some advantage. */
3004 rtx opalign
= GEN_INT (align
/ BITS_PER_UNIT
);
3005 enum machine_mode mode
;
3007 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
); mode
!= VOIDmode
;
3008 mode
= GET_MODE_WIDER_MODE (mode
))
3010 enum insn_code code
= clrstr_optab
[(int) mode
];
3011 insn_operand_predicate_fn pred
;
3013 if (code
!= CODE_FOR_nothing
3014 /* We don't need MODE to be narrower than
3015 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
3016 the mode mask, as it is returned by the macro, it will
3017 definitely be less than the actual mode mask. */
3018 && ((GET_CODE (size
) == CONST_INT
3019 && ((unsigned HOST_WIDE_INT
) INTVAL (size
)
3020 <= (GET_MODE_MASK (mode
) >> 1)))
3021 || GET_MODE_BITSIZE (mode
) >= BITS_PER_WORD
)
3022 && ((pred
= insn_data
[(int) code
].operand
[0].predicate
) == 0
3023 || (*pred
) (object
, BLKmode
))
3024 && ((pred
= insn_data
[(int) code
].operand
[2].predicate
) == 0
3025 || (*pred
) (opalign
, VOIDmode
)))
3028 rtx last
= get_last_insn ();
3031 op1
= convert_to_mode (mode
, size
, 1);
3032 pred
= insn_data
[(int) code
].operand
[1].predicate
;
3033 if (pred
!= 0 && ! (*pred
) (op1
, mode
))
3034 op1
= copy_to_mode_reg (mode
, op1
);
3036 pat
= GEN_FCN ((int) code
) (object
, op1
, opalign
);
3043 delete_insns_since (last
);
3050 /* A subroutine of clear_storage. Expand a call to memset or bzero.
3051 Return the return value of memset, 0 otherwise. */
3054 clear_storage_via_libcall (rtx object
, rtx size
)
3056 tree call_expr
, arg_list
, fn
, object_tree
, size_tree
;
3057 enum machine_mode size_mode
;
3060 /* OBJECT or SIZE may have been passed through protect_from_queue.
3062 It is unsafe to save the value generated by protect_from_queue
3063 and reuse it later. Consider what happens if emit_queue is
3064 called before the return value from protect_from_queue is used.
3066 Expansion of the CALL_EXPR below will call emit_queue before
3067 we are finished emitting RTL for argument setup. So if we are
3068 not careful we could get the wrong value for an argument.
3070 To avoid this problem we go ahead and emit code to copy OBJECT
3071 and SIZE into new pseudos. We can then place those new pseudos
3072 into an RTL_EXPR and use them later, even after a call to
3075 Note this is not strictly needed for library calls since they
3076 do not call emit_queue before loading their arguments. However,
3077 we may need to have library calls call emit_queue in the future
3078 since failing to do so could cause problems for targets which
3079 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
3081 object
= copy_to_mode_reg (Pmode
, XEXP (object
, 0));
3083 if (TARGET_MEM_FUNCTIONS
)
3084 size_mode
= TYPE_MODE (sizetype
);
3086 size_mode
= TYPE_MODE (unsigned_type_node
);
3087 size
= convert_to_mode (size_mode
, size
, 1);
3088 size
= copy_to_mode_reg (size_mode
, size
);
3090 /* It is incorrect to use the libcall calling conventions to call
3091 memset in this context. This could be a user call to memset and
3092 the user may wish to examine the return value from memset. For
3093 targets where libcalls and normal calls have different conventions
3094 for returning pointers, we could end up generating incorrect code.
3096 For convenience, we generate the call to bzero this way as well. */
3098 object_tree
= make_tree (ptr_type_node
, object
);
3099 if (TARGET_MEM_FUNCTIONS
)
3100 size_tree
= make_tree (sizetype
, size
);
3102 size_tree
= make_tree (unsigned_type_node
, size
);
3104 fn
= clear_storage_libcall_fn (true);
3105 arg_list
= tree_cons (NULL_TREE
, size_tree
, NULL_TREE
);
3106 if (TARGET_MEM_FUNCTIONS
)
3107 arg_list
= tree_cons (NULL_TREE
, integer_zero_node
, arg_list
);
3108 arg_list
= tree_cons (NULL_TREE
, object_tree
, arg_list
);
3110 /* Now we have to build up the CALL_EXPR itself. */
3111 call_expr
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fn
)), fn
);
3112 call_expr
= build (CALL_EXPR
, TREE_TYPE (TREE_TYPE (fn
)),
3113 call_expr
, arg_list
, NULL_TREE
);
3115 retval
= expand_expr (call_expr
, NULL_RTX
, VOIDmode
, 0);
3117 /* If we are initializing a readonly value, show the above call
3118 clobbered it. Otherwise, a load from it may erroneously be
3119 hoisted from a loop. */
3120 if (RTX_UNCHANGING_P (object
))
3121 emit_insn (gen_rtx_CLOBBER (VOIDmode
, object
));
3123 return (TARGET_MEM_FUNCTIONS
? retval
: NULL_RTX
);
/* A subroutine of clear_storage_via_libcall.  Create the tree node
   for the function we use for block clears.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_clear_fn;

void
init_block_clear_fn (const char *asmspec)
{
  if (!block_clear_fn)
    {
      tree fn, args;

      if (TARGET_MEM_FUNCTIONS)
	{
	  fn = get_identifier ("memset");
	  args = build_function_type_list (ptr_type_node, ptr_type_node,
					   integer_type_node, sizetype,
					   NULL_TREE);
	}
      else
	{
	  fn = get_identifier ("bzero");
	  args = build_function_type_list (void_type_node, ptr_type_node,
					   unsigned_type_node, NULL_TREE);
	}

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;

      block_clear_fn = fn;
    }

  if (asmspec)
    {
      SET_DECL_RTL (block_clear_fn, NULL_RTX);
      SET_DECL_ASSEMBLER_NAME (block_clear_fn, get_identifier (asmspec));
    }
}

static tree
clear_storage_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_clear_fn)
    init_block_clear_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_clear_fn, NULL);
      assemble_external (block_clear_fn);
    }

  return block_clear_fn;
}
/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx
emit_move_insn (rtx x, rtx y)
{
  enum machine_mode mode = GET_MODE (x);
  rtx y_cst = NULL_RTX;
  rtx last_insn, set;

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);

  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
    abort ();

  /* Never force constant_p_rtx to memory.  */
  if (GET_CODE (y) == CONSTANT_P_RTX)
    ;
  else if (CONSTANT_P (y))
    {
      if (optimize
	  && SCALAR_FLOAT_MODE_P (GET_MODE (x))
	  && (last_insn = compress_float_constant (x, y)))
	return last_insn;

      y_cst = y;

      if (!LEGITIMATE_CONSTANT_P (y))
	{
	  y = force_const_mem (mode, y);

	  /* If the target's cannot_force_const_mem prevented the spill,
	     assume that the target's move expanders will also take care
	     of the non-legitimate constant.  */
	  if (!y)
	    y = y_cst;
	}
    }

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (GET_CODE (x) == MEM
      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
	   && ! push_operand (x, GET_MODE (x)))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
    x = validize_mem (x);

  if (GET_CODE (y) == MEM
      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
    y = validize_mem (y);

  if (mode == BLKmode)
    abort ();

  last_insn = emit_move_insn_1 (x, y);

  if (y_cst && GET_CODE (x) == REG
      && (set = single_set (last_insn)) != NULL_RTX
      && SET_DEST (set) == x
      && ! rtx_equal_p (y_cst, SET_SRC (set)))
    set_unique_reg_note (last_insn, REG_EQUAL, y_cst);

  return last_insn;
}
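/* Usage sketch (hypothetical caller, invented mode and value): copying a
   constant into a fresh pseudo.

     rtx reg = gen_reg_rtx (SImode);
     rtx last = emit_move_insn (reg, GEN_INT (42));

   Even if the constant has to be spilled to the constant pool, LAST carries
   a REG_EQUAL note recording the original constant, as arranged above.  */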
3260 /* Low level part of emit_move_insn.
3261 Called just like emit_move_insn, but assumes X and Y
3262 are basically valid. */
3265 emit_move_insn_1 (rtx x
, rtx y
)
3267 enum machine_mode mode
= GET_MODE (x
);
3268 enum machine_mode submode
;
3269 enum mode_class
class = GET_MODE_CLASS (mode
);
3271 if ((unsigned int) mode
>= (unsigned int) MAX_MACHINE_MODE
)
3274 if (mov_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
)
3276 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) mode
].insn_code
) (x
, y
));
3278 /* Expand complex moves by moving real part and imag part, if possible. */
3279 else if ((class == MODE_COMPLEX_FLOAT
|| class == MODE_COMPLEX_INT
)
3280 && BLKmode
!= (submode
= GET_MODE_INNER (mode
))
3281 && (mov_optab
->handlers
[(int) submode
].insn_code
3282 != CODE_FOR_nothing
))
3284 /* Don't split destination if it is a stack push. */
3285 int stack
= push_operand (x
, GET_MODE (x
));
3287 #ifdef PUSH_ROUNDING
3288 /* In case we output to the stack, but the size is smaller than the
3289 machine can push exactly, we need to use move instructions. */
3291 && (PUSH_ROUNDING (GET_MODE_SIZE (submode
))
3292 != GET_MODE_SIZE (submode
)))
3295 HOST_WIDE_INT offset1
, offset2
;
3297 /* Do not use anti_adjust_stack, since we don't want to update
3298 stack_pointer_delta. */
3299 temp
= expand_binop (Pmode
,
3300 #ifdef STACK_GROWS_DOWNWARD
3308 (GET_MODE_SIZE (GET_MODE (x
)))),
3309 stack_pointer_rtx
, 0, OPTAB_LIB_WIDEN
);
3311 if (temp
!= stack_pointer_rtx
)
3312 emit_move_insn (stack_pointer_rtx
, temp
);
3314 #ifdef STACK_GROWS_DOWNWARD
3316 offset2
= GET_MODE_SIZE (submode
);
3318 offset1
= -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x
)));
3319 offset2
= (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x
)))
3320 + GET_MODE_SIZE (submode
));
3323 emit_move_insn (change_address (x
, submode
,
3324 gen_rtx_PLUS (Pmode
,
3326 GEN_INT (offset1
))),
3327 gen_realpart (submode
, y
));
3328 emit_move_insn (change_address (x
, submode
,
3329 gen_rtx_PLUS (Pmode
,
3331 GEN_INT (offset2
))),
3332 gen_imagpart (submode
, y
));
3336 /* If this is a stack, push the highpart first, so it
3337 will be in the argument order.
3339 In that case, change_address is used only to convert
3340 the mode, not to change the address. */
3343 /* Note that the real part always precedes the imag part in memory
3344 regardless of machine's endianness. */
3345 #ifdef STACK_GROWS_DOWNWARD
3346 emit_move_insn (gen_rtx_MEM (submode
, XEXP (x
, 0)),
3347 gen_imagpart (submode
, y
));
3348 emit_move_insn (gen_rtx_MEM (submode
, XEXP (x
, 0)),
3349 gen_realpart (submode
, y
));
3351 emit_move_insn (gen_rtx_MEM (submode
, XEXP (x
, 0)),
3352 gen_realpart (submode
, y
));
3353 emit_move_insn (gen_rtx_MEM (submode
, XEXP (x
, 0)),
3354 gen_imagpart (submode
, y
));
3359 rtx realpart_x
, realpart_y
;
3360 rtx imagpart_x
, imagpart_y
;
3362 /* If this is a complex value with each part being smaller than a
3363 word, the usual calling sequence will likely pack the pieces into
3364 a single register. Unfortunately, SUBREG of hard registers only
3365 deals in terms of words, so we have a problem converting input
3366 arguments to the CONCAT of two registers that is used elsewhere
3367 for complex values. If this is before reload, we can copy it into
3368 memory and reload. FIXME, we should see about using extract and
3369 insert on integer registers, but complex short and complex char
3370 variables should be rarely used. */
3371 if (GET_MODE_BITSIZE (mode
) < 2 * BITS_PER_WORD
3372 && (reload_in_progress
| reload_completed
) == 0)
3375 = (REG_P (x
) && REGNO (x
) < FIRST_PSEUDO_REGISTER
);
3377 = (REG_P (y
) && REGNO (y
) < FIRST_PSEUDO_REGISTER
);
3379 if (packed_dest_p
|| packed_src_p
)
3381 enum mode_class reg_class
= ((class == MODE_COMPLEX_FLOAT
)
3382 ? MODE_FLOAT
: MODE_INT
);
3384 enum machine_mode reg_mode
3385 = mode_for_size (GET_MODE_BITSIZE (mode
), reg_class
, 1);
3387 if (reg_mode
!= BLKmode
)
3389 rtx mem
= assign_stack_temp (reg_mode
,
3390 GET_MODE_SIZE (mode
), 0);
3391 rtx cmem
= adjust_address (mem
, mode
, 0);
3394 = N_("function using short complex types cannot be inline");
3398 rtx sreg
= gen_rtx_SUBREG (reg_mode
, x
, 0);
3400 emit_move_insn_1 (cmem
, y
);
3401 return emit_move_insn_1 (sreg
, mem
);
3405 rtx sreg
= gen_rtx_SUBREG (reg_mode
, y
, 0);
3407 emit_move_insn_1 (mem
, sreg
);
3408 return emit_move_insn_1 (x
, cmem
);
3414 realpart_x
= gen_realpart (submode
, x
);
3415 realpart_y
= gen_realpart (submode
, y
);
3416 imagpart_x
= gen_imagpart (submode
, x
);
3417 imagpart_y
= gen_imagpart (submode
, y
);
3419 /* Show the output dies here. This is necessary for SUBREGs
3420 of pseudos since we cannot track their lifetimes correctly;
3421 hard regs shouldn't appear here except as return values.
3422 We never want to emit such a clobber after reload. */
3424 && ! (reload_in_progress
|| reload_completed
)
3425 && (GET_CODE (realpart_x
) == SUBREG
3426 || GET_CODE (imagpart_x
) == SUBREG
))
3427 emit_insn (gen_rtx_CLOBBER (VOIDmode
, x
));
3429 emit_move_insn (realpart_x
, realpart_y
);
3430 emit_move_insn (imagpart_x
, imagpart_y
);
3433 return get_last_insn ();
3436 /* Handle MODE_CC modes: If we don't have a special move insn for this mode,
3437 find a mode to do it in. If we have a movcc, use it. Otherwise,
3438 find the MODE_INT mode of the same width. */
3439 else if (GET_MODE_CLASS (mode
) == MODE_CC
3440 && mov_optab
->handlers
[(int) mode
].insn_code
== CODE_FOR_nothing
)
3442 enum insn_code insn_code
;
3443 enum machine_mode tmode
= VOIDmode
;
3447 && mov_optab
->handlers
[(int) CCmode
].insn_code
!= CODE_FOR_nothing
)
3450 for (tmode
= QImode
; tmode
!= VOIDmode
;
3451 tmode
= GET_MODE_WIDER_MODE (tmode
))
3452 if (GET_MODE_SIZE (tmode
) == GET_MODE_SIZE (mode
))
3455 if (tmode
== VOIDmode
)
3458 /* Get X and Y in TMODE. We can't use gen_lowpart here because it
3459 may call change_address which is not appropriate if we were
3460 called when a reload was in progress. We don't have to worry
3461 about changing the address since the size in bytes is supposed to
3462 be the same. Copy the MEM to change the mode and move any
3463 substitutions from the old MEM to the new one. */
3465 if (reload_in_progress
)
3467 x
= gen_lowpart_common (tmode
, x1
);
3468 if (x
== 0 && GET_CODE (x1
) == MEM
)
3470 x
= adjust_address_nv (x1
, tmode
, 0);
3471 copy_replacements (x1
, x
);
3474 y
= gen_lowpart_common (tmode
, y1
);
3475 if (y
== 0 && GET_CODE (y1
) == MEM
)
3477 y
= adjust_address_nv (y1
, tmode
, 0);
3478 copy_replacements (y1
, y
);
3483 x
= gen_lowpart (tmode
, x
);
3484 y
= gen_lowpart (tmode
, y
);
3487 insn_code
= mov_optab
->handlers
[(int) tmode
].insn_code
;
3488 return emit_insn (GEN_FCN (insn_code
) (x
, y
));
3491 /* This will handle any multi-word or full-word mode that lacks a move_insn
3492 pattern. However, you will get better code if you define such patterns,
3493 even if they must turn into multiple assembler instructions. */
3494 else if (GET_MODE_SIZE (mode
) >= UNITS_PER_WORD
)
3501 #ifdef PUSH_ROUNDING
3503 /* If X is a push on the stack, do the push now and replace
3504 X with a reference to the stack pointer. */
3505 if (push_operand (x
, GET_MODE (x
)))
3510 /* Do not use anti_adjust_stack, since we don't want to update
3511 stack_pointer_delta. */
3512 temp
= expand_binop (Pmode
,
3513 #ifdef STACK_GROWS_DOWNWARD
3521 (GET_MODE_SIZE (GET_MODE (x
)))),
3522 stack_pointer_rtx
, 0, OPTAB_LIB_WIDEN
);
3524 if (temp
!= stack_pointer_rtx
)
3525 emit_move_insn (stack_pointer_rtx
, temp
);
3527 code
= GET_CODE (XEXP (x
, 0));
3529 /* Just hope that small offsets off SP are OK. */
3530 if (code
== POST_INC
)
3531 temp
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
3532 GEN_INT (-((HOST_WIDE_INT
)
3533 GET_MODE_SIZE (GET_MODE (x
)))));
3534 else if (code
== POST_DEC
)
3535 temp
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
3536 GEN_INT (GET_MODE_SIZE (GET_MODE (x
))));
3538 temp
= stack_pointer_rtx
;
3540 x
= change_address (x
, VOIDmode
, temp
);
3544 /* If we are in reload, see if either operand is a MEM whose address
3545 is scheduled for replacement. */
3546 if (reload_in_progress
&& GET_CODE (x
) == MEM
3547 && (inner
= find_replacement (&XEXP (x
, 0))) != XEXP (x
, 0))
3548 x
= replace_equiv_address_nv (x
, inner
);
3549 if (reload_in_progress
&& GET_CODE (y
) == MEM
3550 && (inner
= find_replacement (&XEXP (y
, 0))) != XEXP (y
, 0))
3551 y
= replace_equiv_address_nv (y
, inner
);
3557 i
< (GET_MODE_SIZE (mode
) + (UNITS_PER_WORD
- 1)) / UNITS_PER_WORD
;
3560 rtx xpart
= operand_subword (x
, i
, 1, mode
);
3561 rtx ypart
= operand_subword (y
, i
, 1, mode
);
3563 /* If we can't get a part of Y, put Y into memory if it is a
3564 constant. Otherwise, force it into a register. If we still
3565 can't get a part of Y, abort. */
3566 if (ypart
== 0 && CONSTANT_P (y
))
3568 y
= force_const_mem (mode
, y
);
3569 ypart
= operand_subword (y
, i
, 1, mode
);
3571 else if (ypart
== 0)
3572 ypart
= operand_subword_force (y
, i
, mode
);
3574 if (xpart
== 0 || ypart
== 0)
3577 need_clobber
|= (GET_CODE (xpart
) == SUBREG
);
3579 last_insn
= emit_move_insn (xpart
, ypart
);
3585 /* Show the output dies here. This is necessary for SUBREGs
3586 of pseudos since we cannot track their lifetimes correctly;
3587 hard regs shouldn't appear here except as return values.
3588 We never want to emit such a clobber after reload. */
3590 && ! (reload_in_progress
|| reload_completed
)
3591 && need_clobber
!= 0)
3592 emit_insn (gen_rtx_CLOBBER (VOIDmode
, x
));
/* If Y is representable exactly in a narrower mode, and the target can
   perform the extension directly from constant or memory, then emit the
   move as an extension.  */

static rtx
compress_float_constant (rtx x, rtx y)
{
  enum machine_mode dstmode = GET_MODE (x);
  enum machine_mode orig_srcmode = GET_MODE (y);
  enum machine_mode srcmode;
  REAL_VALUE_TYPE r;

  REAL_VALUE_FROM_CONST_DOUBLE (r, y);

  for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
       srcmode != orig_srcmode;
       srcmode = GET_MODE_WIDER_MODE (srcmode))
    {
      enum insn_code ic;
      rtx trunc_y, last_insn;

      /* Skip if the target can't extend this way.  */
      ic = can_extend_p (dstmode, srcmode, 0);
      if (ic == CODE_FOR_nothing)
	continue;

      /* Skip if the narrowed value isn't exact.  */
      if (! exact_real_truncate (srcmode, &r))
	continue;

      trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);

      if (LEGITIMATE_CONSTANT_P (trunc_y))
	{
	  /* Skip if the target needs extra instructions to perform
	     the extension.  */
	  if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
	    continue;
	}
      else if (float_extend_from_mem[dstmode][srcmode])
	trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
      else
	continue;

      emit_unop_insn (ic, x, trunc_y, UNKNOWN);
      last_insn = get_last_insn ();

      if (GET_CODE (x) == REG)
	set_unique_reg_note (last_insn, REG_EQUAL, y);

      return last_insn;
    }

  return NULL_RTX;
}
/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   Note that it is not possible for the value returned to be a QUEUED.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */

rtx
push_block (rtx size, int extra, int below)
{
  rtx temp;

  size = convert_modes (Pmode, ptr_mode, size, 1);
  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (size, extra));
  else if (GET_CODE (size) == REG && extra == 0)
    anti_adjust_stack (size);
  else
    {
      temp = copy_to_mode_reg (Pmode, size);
      if (extra != 0)
	temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
			     temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);
    }

#ifndef STACK_GROWS_DOWNWARD
  if (0)
#else
  if (1)
#endif
    {
      temp = virtual_outgoing_args_rtx;
      if (extra != 0 && below)
	temp = plus_constant (temp, extra);
    }
  else
    {
      if (GET_CODE (size) == CONST_INT)
	temp = plus_constant (virtual_outgoing_args_rtx,
			      -INTVAL (size) - (below ? 0 : extra));
      else if (extra != 0 && !below)
	temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
			     negate_rtx (Pmode, plus_constant (size, extra)));
      else
	temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
			     negate_rtx (Pmode, size));
    }

  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
}
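/* Usage sketch (hypothetical caller, invented byte count): reserving 32
   bytes of outgoing-argument space and copying a BLKmode value SRC into it.

     rtx addr = push_block (GEN_INT (32), 0, 0);
     rtx dst = gen_rtx_MEM (BLKmode, addr);
     emit_block_move (dst, src, GEN_INT (32), BLOCK_OP_CALL_PARM);

   SRC is assumed to be an existing BLKmode MEM of at least that size.  */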
3714 #ifdef PUSH_ROUNDING
3716 /* Emit single push insn. */
3719 emit_single_push_insn (enum machine_mode mode
, rtx x
, tree type
)
3722 unsigned rounded_size
= PUSH_ROUNDING (GET_MODE_SIZE (mode
));
3724 enum insn_code icode
;
3725 insn_operand_predicate_fn pred
;
3727 stack_pointer_delta
+= PUSH_ROUNDING (GET_MODE_SIZE (mode
));
3728 /* If there is push pattern, use it. Otherwise try old way of throwing
3729 MEM representing push operation to move expander. */
3730 icode
= push_optab
->handlers
[(int) mode
].insn_code
;
3731 if (icode
!= CODE_FOR_nothing
)
3733 if (((pred
= insn_data
[(int) icode
].operand
[0].predicate
)
3734 && !((*pred
) (x
, mode
))))
3735 x
= force_reg (mode
, x
);
3736 emit_insn (GEN_FCN (icode
) (x
));
3739 if (GET_MODE_SIZE (mode
) == rounded_size
)
3740 dest_addr
= gen_rtx_fmt_e (STACK_PUSH_CODE
, Pmode
, stack_pointer_rtx
);
3741 /* If we are to pad downward, adjust the stack pointer first and
3742 then store X into the stack location using an offset. This is
3743 because emit_move_insn does not know how to pad; it does not have
3745 else if (FUNCTION_ARG_PADDING (mode
, type
) == downward
)
3747 unsigned padding_size
= rounded_size
- GET_MODE_SIZE (mode
);
3748 HOST_WIDE_INT offset
;
3750 emit_move_insn (stack_pointer_rtx
,
3751 expand_binop (Pmode
,
3752 #ifdef STACK_GROWS_DOWNWARD
3758 GEN_INT (rounded_size
),
3759 NULL_RTX
, 0, OPTAB_LIB_WIDEN
));
3761 offset
= (HOST_WIDE_INT
) padding_size
;
3762 #ifdef STACK_GROWS_DOWNWARD
3763 if (STACK_PUSH_CODE
== POST_DEC
)
3764 /* We have already decremented the stack pointer, so get the
3766 offset
+= (HOST_WIDE_INT
) rounded_size
;
3768 if (STACK_PUSH_CODE
== POST_INC
)
3769 /* We have already incremented the stack pointer, so get the
3771 offset
-= (HOST_WIDE_INT
) rounded_size
;
3773 dest_addr
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
, GEN_INT (offset
));
3777 #ifdef STACK_GROWS_DOWNWARD
3778 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3779 dest_addr
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
3780 GEN_INT (-(HOST_WIDE_INT
) rounded_size
));
3782 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3783 dest_addr
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
3784 GEN_INT (rounded_size
));
3786 dest_addr
= gen_rtx_PRE_MODIFY (Pmode
, stack_pointer_rtx
, dest_addr
);
3789 dest
= gen_rtx_MEM (mode
, dest_addr
);
3793 set_mem_attributes (dest
, type
, 1);
3795 if (flag_optimize_sibling_calls
)
3796 /* Function incoming arguments may overlap with sibling call
3797 outgoing arguments and we cannot allow reordering of reads
3798 from function arguments with stores to outgoing arguments
3799 of sibling calls. */
3800 set_mem_alias_set (dest
, 0);
3802 emit_move_insn (dest
, x
);
3806 /* Generate code to push X onto the stack, assuming it has mode MODE and
3808 MODE is redundant except when X is a CONST_INT (since they don't
3810 SIZE is an rtx for the size of data to be copied (in bytes),
3811 needed only if X is BLKmode.
3813 ALIGN (in bits) is maximum alignment we can assume.
3815 If PARTIAL and REG are both nonzero, then copy that many of the first
3816 words of X into registers starting with REG, and push the rest of X.
3817 The amount of space pushed is decreased by PARTIAL words,
3818 rounded *down* to a multiple of PARM_BOUNDARY.
3819 REG must be a hard register in this case.
3820 If REG is zero but PARTIAL is not, take any all others actions for an
3821 argument partially in registers, but do not actually load any
3824 EXTRA is the amount in bytes of extra space to leave next to this arg.
3825 This is ignored if an argument block has already been allocated.
3827 On a machine that lacks real push insns, ARGS_ADDR is the address of
3828 the bottom of the argument block for this call. We use indexing off there
3829 to store the arg. On machines with push insns, ARGS_ADDR is 0 when a
3830 argument block has not been preallocated.
3832 ARGS_SO_FAR is the size of args previously pushed for this call.
3834 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3835 for arguments passed in registers. If nonzero, it will be the number
3836 of bytes required. */
3839 emit_push_insn (rtx x
, enum machine_mode mode
, tree type
, rtx size
,
3840 unsigned int align
, int partial
, rtx reg
, int extra
,
3841 rtx args_addr
, rtx args_so_far
, int reg_parm_stack_space
,
3845 enum direction stack_direction
3846 #ifdef STACK_GROWS_DOWNWARD
3852 /* Decide where to pad the argument: `downward' for below,
3853 `upward' for above, or `none' for don't pad it.
3854 Default is below for small data on big-endian machines; else above. */
3855 enum direction where_pad
= FUNCTION_ARG_PADDING (mode
, type
);
3857 /* Invert direction if stack is post-decrement.
3859 if (STACK_PUSH_CODE
== POST_DEC
)
3860 if (where_pad
!= none
)
3861 where_pad
= (where_pad
== downward
? upward
: downward
);
3863 xinner
= x
= protect_from_queue (x
, 0);
3865 if (mode
== BLKmode
)
3867 /* Copy a block into the stack, entirely or partially. */
3870 int used
= partial
* UNITS_PER_WORD
;
3871 int offset
= used
% (PARM_BOUNDARY
/ BITS_PER_UNIT
);
3879 /* USED is now the # of bytes we need not copy to the stack
3880 because registers will take care of them. */
3883 xinner
= adjust_address (xinner
, BLKmode
, used
);
3885 /* If the partial register-part of the arg counts in its stack size,
3886 skip the part of stack space corresponding to the registers.
3887 Otherwise, start copying to the beginning of the stack space,
3888 by setting SKIP to 0. */
3889 skip
= (reg_parm_stack_space
== 0) ? 0 : used
;
3891 #ifdef PUSH_ROUNDING
3892 /* Do it with several push insns if that doesn't take lots of insns
3893 and if there is no difficulty with push insns that skip bytes
3894 on the stack for alignment purposes. */
3897 && GET_CODE (size
) == CONST_INT
3899 && MEM_ALIGN (xinner
) >= align
3900 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size
) - used
, align
))
3901 /* Here we avoid the case of a structure whose weak alignment
3902 forces many pushes of a small amount of data,
3903 and such small pushes do rounding that causes trouble. */
3904 && ((! SLOW_UNALIGNED_ACCESS (word_mode
, align
))
3905 || align
>= BIGGEST_ALIGNMENT
3906 || (PUSH_ROUNDING (align
/ BITS_PER_UNIT
)
3907 == (align
/ BITS_PER_UNIT
)))
3908 && PUSH_ROUNDING (INTVAL (size
)) == INTVAL (size
))
3910 /* Push padding now if padding above and stack grows down,
3911 or if padding below and stack grows up.
3912 But if space already allocated, this has already been done. */
3913 if (extra
&& args_addr
== 0
3914 && where_pad
!= none
&& where_pad
!= stack_direction
)
3915 anti_adjust_stack (GEN_INT (extra
));
3917 move_by_pieces (NULL
, xinner
, INTVAL (size
) - used
, align
, 0);
3920 #endif /* PUSH_ROUNDING */
3924 /* Otherwise make space on the stack and copy the data
3925 to the address of that space. */
3927 /* Deduct words put into registers from the size we must copy. */
3930 if (GET_CODE (size
) == CONST_INT
)
3931 size
= GEN_INT (INTVAL (size
) - used
);
3933 size
= expand_binop (GET_MODE (size
), sub_optab
, size
,
3934 GEN_INT (used
), NULL_RTX
, 0,
3938 /* Get the address of the stack space.
3939 In this case, we do not deal with EXTRA separately.
3940 A single stack adjust will do. */
3943 temp
= push_block (size
, extra
, where_pad
== downward
);
3946 else if (GET_CODE (args_so_far
) == CONST_INT
)
3947 temp
= memory_address (BLKmode
,
3948 plus_constant (args_addr
,
3949 skip
+ INTVAL (args_so_far
)));
3951 temp
= memory_address (BLKmode
,
3952 plus_constant (gen_rtx_PLUS (Pmode
,
3957 if (!ACCUMULATE_OUTGOING_ARGS
)
3959 /* If the source is referenced relative to the stack pointer,
3960 copy it to another register to stabilize it. We do not need
3961 to do this if we know that we won't be changing sp. */
3963 if (reg_mentioned_p (virtual_stack_dynamic_rtx
, temp
)
3964 || reg_mentioned_p (virtual_outgoing_args_rtx
, temp
))
3965 temp
= copy_to_reg (temp
);
3968 target
= gen_rtx_MEM (BLKmode
, temp
);
3972 set_mem_attributes (target
, type
, 1);
3973 /* Function incoming arguments may overlap with sibling call
3974 outgoing arguments and we cannot allow reordering of reads
3975 from function arguments with stores to outgoing arguments
3976 of sibling calls. */
3977 set_mem_alias_set (target
, 0);
3980 /* ALIGN may well be better aligned than TYPE, e.g. due to
3981 PARM_BOUNDARY. Assume the caller isn't lying. */
3982 set_mem_align (target
, align
);
3984 emit_block_move (target
, xinner
, size
, BLOCK_OP_CALL_PARM
);
3987 else if (partial
> 0)
3989 /* Scalar partly in registers. */
3991 int size
= GET_MODE_SIZE (mode
) / UNITS_PER_WORD
;
3994 /* # words of start of argument
3995 that we must make space for but need not store. */
3996 int offset
= partial
% (PARM_BOUNDARY
/ BITS_PER_WORD
);
3997 int args_offset
= INTVAL (args_so_far
);
4000 /* Push padding now if padding above and stack grows down,
4001 or if padding below and stack grows up.
4002 But if space already allocated, this has already been done. */
4003 if (extra
&& args_addr
== 0
4004 && where_pad
!= none
&& where_pad
!= stack_direction
)
4005 anti_adjust_stack (GEN_INT (extra
));
4007 /* If we make space by pushing it, we might as well push
4008 the real data. Otherwise, we can leave OFFSET nonzero
4009 and leave the space uninitialized. */
4013 /* Now NOT_STACK gets the number of words that we don't need to
4014 allocate on the stack. */
4015 not_stack
= partial
- offset
;
4017 /* If the partial register-part of the arg counts in its stack size,
4018 skip the part of stack space corresponding to the registers.
4019 Otherwise, start copying to the beginning of the stack space,
4020 by setting SKIP to 0. */
4021 skip
= (reg_parm_stack_space
== 0) ? 0 : not_stack
;
4023 if (CONSTANT_P (x
) && ! LEGITIMATE_CONSTANT_P (x
))
4024 x
= validize_mem (force_const_mem (mode
, x
));
4026 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
4027 SUBREGs of such registers are not allowed. */
4028 if ((GET_CODE (x
) == REG
&& REGNO (x
) < FIRST_PSEUDO_REGISTER
4029 && GET_MODE_CLASS (GET_MODE (x
)) != MODE_INT
))
4030 x
= copy_to_reg (x
);
4032 /* Loop over all the words allocated on the stack for this arg. */
4033 /* We can do it by words, because any scalar bigger than a word
4034 has a size a multiple of a word. */
4035 #ifndef PUSH_ARGS_REVERSED
4036 for (i
= not_stack
; i
< size
; i
++)
4038 for (i
= size
- 1; i
>= not_stack
; i
--)
4040 if (i
>= not_stack
+ offset
)
4041 emit_push_insn (operand_subword_force (x
, i
, mode
),
4042 word_mode
, NULL_TREE
, NULL_RTX
, align
, 0, NULL_RTX
,
4044 GEN_INT (args_offset
+ ((i
- not_stack
+ skip
)
4046 reg_parm_stack_space
, alignment_pad
);
4053 /* Push padding now if padding above and stack grows down,
4054 or if padding below and stack grows up.
4055 But if space already allocated, this has already been done. */
4056 if (extra
&& args_addr
== 0
4057 && where_pad
!= none
&& where_pad
!= stack_direction
)
4058 anti_adjust_stack (GEN_INT (extra
));
4060 #ifdef PUSH_ROUNDING
4061 if (args_addr
== 0 && PUSH_ARGS
)
4062 emit_single_push_insn (mode
, x
, type
);
4066 if (GET_CODE (args_so_far
) == CONST_INT
)
4068 = memory_address (mode
,
4069 plus_constant (args_addr
,
4070 INTVAL (args_so_far
)));
4072 addr
= memory_address (mode
, gen_rtx_PLUS (Pmode
, args_addr
,
4074 dest
= gen_rtx_MEM (mode
, addr
);
4077 set_mem_attributes (dest
, type
, 1);
4078 /* Function incoming arguments may overlap with sibling call
4079 outgoing arguments and we cannot allow reordering of reads
4080 from function arguments with stores to outgoing arguments
4081 of sibling calls. */
4082 set_mem_alias_set (dest
, 0);
4085 emit_move_insn (dest
, x
);
4089 /* If part should go in registers, copy that part
4090 into the appropriate registers. Do this now, at the end,
4091 since mem-to-mem copies above may do function calls. */
4092 if (partial
> 0 && reg
!= 0)
4094 /* Handle calls that pass values in multiple non-contiguous locations.
4095 The Irix 6 ABI has examples of this. */
4096 if (GET_CODE (reg
) == PARALLEL
)
4097 emit_group_load (reg
, x
, type
, -1);
4099 move_block_to_reg (REGNO (reg
), x
, partial
, mode
);
4102 if (extra
&& args_addr
== 0 && where_pad
== stack_direction
)
4103 anti_adjust_stack (GEN_INT (extra
));
4105 if (alignment_pad
&& args_addr
== 0)
4106 anti_adjust_stack (alignment_pad
);
/* Return X if X can be used as a subtarget in a sequence of arithmetic
   operations.  */

static rtx
get_subtarget (rtx x)
{
  return ((x == 0
	   /* Only registers can be subtargets.  */
	   || GET_CODE (x) != REG
	   /* If the register is readonly, it can't be set more than once.  */
	   || RTX_UNCHANGING_P (x)
	   /* Don't use hard regs to avoid extending their life.  */
	   || REGNO (x) < FIRST_PSEUDO_REGISTER
	   /* Avoid subtargets inside loops,
	      since they hide some invariant expressions.  */
	   || preserve_subexpressions_p ())
	  ? 0 : x);
}
4128 /* Expand an assignment that stores the value of FROM into TO.
4129 If WANT_VALUE is nonzero, return an rtx for the value of TO.
4130 (This may contain a QUEUED rtx;
4131 if the value is constant, this rtx is a constant.)
4132 Otherwise, the returned value is NULL_RTX. */
4135 expand_assignment (tree to
, tree from
, int want_value
)
4140 /* Don't crash if the lhs of the assignment was erroneous. */
4142 if (TREE_CODE (to
) == ERROR_MARK
)
4144 result
= expand_expr (from
, NULL_RTX
, VOIDmode
, 0);
4145 return want_value
? result
: NULL_RTX
;
4148 /* Assignment of a structure component needs special treatment
4149 if the structure component's rtx is not simply a MEM.
4150 Assignment of an array element at a constant index, and assignment of
4151 an array element in an unaligned packed structure field, has the same
4154 if (TREE_CODE (to
) == COMPONENT_REF
|| TREE_CODE (to
) == BIT_FIELD_REF
4155 || TREE_CODE (to
) == ARRAY_REF
|| TREE_CODE (to
) == ARRAY_RANGE_REF
4156 || TREE_CODE (TREE_TYPE (to
)) == ARRAY_TYPE
)
4158 enum machine_mode mode1
;
4159 HOST_WIDE_INT bitsize
, bitpos
;
4167 tem
= get_inner_reference (to
, &bitsize
, &bitpos
, &offset
, &mode1
,
4168 &unsignedp
, &volatilep
);
4170 /* If we are going to use store_bit_field and extract_bit_field,
4171 make sure to_rtx will be safe for multiple use. */
4173 if (mode1
== VOIDmode
&& want_value
)
4174 tem
= stabilize_reference (tem
);
4176 orig_to_rtx
= to_rtx
= expand_expr (tem
, NULL_RTX
, VOIDmode
, 0);
4180 rtx offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, EXPAND_SUM
);
4182 if (GET_CODE (to_rtx
) != MEM
)
4185 #ifdef POINTERS_EXTEND_UNSIGNED
4186 if (GET_MODE (offset_rtx
) != Pmode
)
4187 offset_rtx
= convert_to_mode (Pmode
, offset_rtx
, 0);
4189 if (GET_MODE (offset_rtx
) != ptr_mode
)
4190 offset_rtx
= convert_to_mode (ptr_mode
, offset_rtx
, 0);
4193 /* A constant address in TO_RTX can have VOIDmode, we must not try
4194 to call force_reg for that case. Avoid that case. */
4195 if (GET_CODE (to_rtx
) == MEM
4196 && GET_MODE (to_rtx
) == BLKmode
4197 && GET_MODE (XEXP (to_rtx
, 0)) != VOIDmode
4199 && (bitpos
% bitsize
) == 0
4200 && (bitsize
% GET_MODE_ALIGNMENT (mode1
)) == 0
4201 && MEM_ALIGN (to_rtx
) == GET_MODE_ALIGNMENT (mode1
))
4203 to_rtx
= adjust_address (to_rtx
, mode1
, bitpos
/ BITS_PER_UNIT
);
4207 to_rtx
= offset_address (to_rtx
, offset_rtx
,
4208 highest_pow2_factor_for_type (TREE_TYPE (to
),
4212 if (GET_CODE (to_rtx
) == MEM
)
4214 /* If the field is at offset zero, we could have been given the
4215 DECL_RTX of the parent struct. Don't munge it. */
4216 to_rtx
= shallow_copy_rtx (to_rtx
);
4218 set_mem_attributes_minus_bitpos (to_rtx
, to
, 0, bitpos
);
4221 /* Deal with volatile and readonly fields. The former is only done
4222 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4223 if (volatilep
&& GET_CODE (to_rtx
) == MEM
)
4225 if (to_rtx
== orig_to_rtx
)
4226 to_rtx
= copy_rtx (to_rtx
);
4227 MEM_VOLATILE_P (to_rtx
) = 1;
4230 if (TREE_CODE (to
) == COMPONENT_REF
4231 && TREE_READONLY (TREE_OPERAND (to
, 1)))
4233 if (to_rtx
== orig_to_rtx
)
4234 to_rtx
= copy_rtx (to_rtx
);
4235 RTX_UNCHANGING_P (to_rtx
) = 1;
4238 if (GET_CODE (to_rtx
) == MEM
&& ! can_address_p (to
))
4240 if (to_rtx
== orig_to_rtx
)
4241 to_rtx
= copy_rtx (to_rtx
);
4242 MEM_KEEP_ALIAS_SET_P (to_rtx
) = 1;
4245 result
= store_field (to_rtx
, bitsize
, bitpos
, mode1
, from
,
4247 /* Spurious cast for HPUX compiler. */
4248 ? ((enum machine_mode
)
4249 TYPE_MODE (TREE_TYPE (to
)))
4251 unsignedp
, TREE_TYPE (tem
), get_alias_set (to
));
4253 preserve_temp_slots (result
);
4257 /* If the value is meaningful, convert RESULT to the proper mode.
4258 Otherwise, return nothing. */
4259 return (want_value
? convert_modes (TYPE_MODE (TREE_TYPE (to
)),
4260 TYPE_MODE (TREE_TYPE (from
)),
4262 TREE_UNSIGNED (TREE_TYPE (to
)))
  /* If the rhs is a function call and its value is not an aggregate,
     call the function before we start to compute the lhs.
     This is needed for correct code for cases such as
     val = setjmp (buf) on machines where reference to val
     requires loading up part of an address in a separate insn.

     Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
     since it might be a promoted variable where the zero- or sign- extension
     needs to be done.  Handling this in the normal way is safe because no
     computation is done before the call.  */
  if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
      && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
	    && GET_CODE (DECL_RTL (to)) == REG))
    {
      rtx value;

      value = expand_expr (from, NULL_RTX, VOIDmode, 0);
      if (to_rtx == 0)
	to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);

      /* Handle calls that return values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      if (GET_CODE (to_rtx) == PARALLEL)
	emit_group_load (to_rtx, value, TREE_TYPE (from),
			 int_size_in_bytes (TREE_TYPE (from)));
      else if (GET_MODE (to_rtx) == BLKmode)
	emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
      else
	{
#ifdef POINTERS_EXTEND_UNSIGNED
	  if (POINTER_TYPE_P (TREE_TYPE (to))
	      && GET_MODE (to_rtx) != GET_MODE (value))
	    value = convert_memory_address (GET_MODE (to_rtx), value);
#endif
	  emit_move_insn (to_rtx, value);
	}

      preserve_temp_slots (to_rtx);
      return want_value ? to_rtx : NULL_RTX;
    }
  /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.
     Don't re-expand if it was expanded already (in COMPONENT_REF case).  */

  if (to_rtx == 0)
    to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);

  /* Don't move directly into a return register.  */
  if (TREE_CODE (to) == RESULT_DECL
      && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
    {
      rtx temp;

      temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);

      if (GET_CODE (to_rtx) == PARALLEL)
	emit_group_load (to_rtx, temp, TREE_TYPE (from),
			 int_size_in_bytes (TREE_TYPE (from)));
      else
	emit_move_insn (to_rtx, temp);

      preserve_temp_slots (to_rtx);
      return want_value ? to_rtx : NULL_RTX;
    }
  /* In case we are returning the contents of an object which overlaps
     the place the value is being stored, use a safe function when copying
     a value through a pointer into a structure value return block.  */
  if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
      && current_function_returns_struct
      && !current_function_returns_pcc_struct)
    {
      rtx from_rtx, size;

      size = expr_size (from);
      from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);

      if (TARGET_MEM_FUNCTIONS)
	emit_library_call (memmove_libfunc, LCT_NORMAL,
			   VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
			   XEXP (from_rtx, 0), Pmode,
			   convert_to_mode (TYPE_MODE (sizetype),
					    size, TREE_UNSIGNED (sizetype)),
			   TYPE_MODE (sizetype));
      else
	emit_library_call (bcopy_libfunc, LCT_NORMAL,
			   VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
			   XEXP (to_rtx, 0), Pmode,
			   convert_to_mode (TYPE_MODE (integer_type_node),
					    size,
					    TREE_UNSIGNED (integer_type_node)),
			   TYPE_MODE (integer_type_node));

      preserve_temp_slots (to_rtx);
      return want_value ? to_rtx : NULL_RTX;
    }
  /* Compute FROM and store the value in the rtx we got.  */

  result = store_expr (from, to_rtx, want_value);
  preserve_temp_slots (result);
  return want_value ? result : NULL_RTX;
}
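
/* Illustrative sketch only (not part of GCC): how a caller that expands a
   MODIFY_EXPR-style assignment might hand the work to expand_assignment
   above.  The helper name and the tree variables are hypothetical.  */
#if 0
static rtx
expand_modify_expr_sketch (tree modify_expr, int want_value)
{
  tree lhs = TREE_OPERAND (modify_expr, 0);
  tree rhs = TREE_OPERAND (modify_expr, 1);

  /* expand_assignment itself handles the special lhs forms
     (COMPONENT_REF, ARRAY_REF, ...); the caller only says whether it
     needs the stored value back (WANT_VALUE nonzero).  */
  return expand_assignment (lhs, rhs, want_value);
}
#endif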
/* Generate code for computing expression EXP,
   and storing the value into TARGET.
   TARGET may contain a QUEUED rtx.

   If WANT_VALUE & 1 is nonzero, return a copy of the value
   not in TARGET, so that we can be sure to use the proper
   value in a containing expression even if TARGET has something
   else stored in it.  If possible, we copy the value through a pseudo
   and return that pseudo.  Or, if the value is constant, we try to
   return the constant.  In some cases, we return a pseudo
   copied *from* TARGET.

   If the mode is BLKmode then we may return TARGET itself.
   It turns out that in BLKmode it doesn't cause a problem,
   because C has no operators that could combine two different
   assignments into the same BLKmode object with different values
   with no sequence point.  Will other languages need this to
   be more thorough?

   If WANT_VALUE & 1 is 0, we return NULL, to make sure
   to catch quickly any cases where the caller uses the value
   and fails to set WANT_VALUE.

   If WANT_VALUE & 2 is set, this is a store into a call param on the
   stack, and block moves may need to be treated specially.  */
4409 store_expr (tree exp
, rtx target
, int want_value
)
4412 int dont_return_target
= 0;
4413 int dont_store_target
= 0;
4415 if (VOID_TYPE_P (TREE_TYPE (exp
)))
4417 /* C++ can generate ?: expressions with a throw expression in one
4418 branch and an rvalue in the other. Here, we resolve attempts to
4419 store the throw expression's nonexistent result. */
4422 expand_expr (exp
, const0_rtx
, VOIDmode
, 0);
4425 if (TREE_CODE (exp
) == COMPOUND_EXPR
)
4427 /* Perform first part of compound expression, then assign from second
4429 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
,
4430 want_value
& 2 ? EXPAND_STACK_PARM
: EXPAND_NORMAL
);
4432 return store_expr (TREE_OPERAND (exp
, 1), target
, want_value
);
4434 else if (TREE_CODE (exp
) == COND_EXPR
&& GET_MODE (target
) == BLKmode
)
4436 /* For conditional expression, get safe form of the target. Then
4437 test the condition, doing the appropriate assignment on either
4438 side. This avoids the creation of unnecessary temporaries.
4439 For non-BLKmode, it is more efficient not to do this. */
4441 rtx lab1
= gen_label_rtx (), lab2
= gen_label_rtx ();
4444 target
= protect_from_queue (target
, 1);
4446 do_pending_stack_adjust ();
4448 jumpifnot (TREE_OPERAND (exp
, 0), lab1
);
4449 start_cleanup_deferral ();
4450 store_expr (TREE_OPERAND (exp
, 1), target
, want_value
& 2);
4451 end_cleanup_deferral ();
4453 emit_jump_insn (gen_jump (lab2
));
4456 start_cleanup_deferral ();
4457 store_expr (TREE_OPERAND (exp
, 2), target
, want_value
& 2);
4458 end_cleanup_deferral ();
4463 return want_value
& 1 ? target
: NULL_RTX
;
4465 else if (queued_subexp_p (target
))
4466 /* If target contains a postincrement, let's not risk
4467 using it as the place to generate the rhs. */
4469 if (GET_MODE (target
) != BLKmode
&& GET_MODE (target
) != VOIDmode
)
4471 /* Expand EXP into a new pseudo. */
4472 temp
= gen_reg_rtx (GET_MODE (target
));
4473 temp
= expand_expr (exp
, temp
, GET_MODE (target
),
4475 ? EXPAND_STACK_PARM
: EXPAND_NORMAL
));
4478 temp
= expand_expr (exp
, NULL_RTX
, GET_MODE (target
),
4480 ? EXPAND_STACK_PARM
: EXPAND_NORMAL
));
4482 /* If target is volatile, ANSI requires accessing the value
4483 *from* the target, if it is accessed. So make that happen.
4484 In no case return the target itself. */
4485 if (! MEM_VOLATILE_P (target
) && (want_value
& 1) != 0)
4486 dont_return_target
= 1;
4488 else if ((want_value
& 1) != 0
4489 && GET_CODE (target
) == MEM
4490 && ! MEM_VOLATILE_P (target
)
4491 && GET_MODE (target
) != BLKmode
)
4492 /* If target is in memory and caller wants value in a register instead,
4493 arrange that. Pass TARGET as target for expand_expr so that,
4494 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4495 We know expand_expr will not use the target in that case.
4496 Don't do this if TARGET is volatile because we are supposed
4497 to write it and then read it. */
4499 temp
= expand_expr (exp
, target
, GET_MODE (target
),
4500 want_value
& 2 ? EXPAND_STACK_PARM
: EXPAND_NORMAL
);
4501 if (GET_MODE (temp
) != BLKmode
&& GET_MODE (temp
) != VOIDmode
)
4503 /* If TEMP is already in the desired TARGET, only copy it from
4504 memory and don't store it there again. */
4506 || (rtx_equal_p (temp
, target
)
4507 && ! side_effects_p (temp
) && ! side_effects_p (target
)))
4508 dont_store_target
= 1;
4509 temp
= copy_to_reg (temp
);
4511 dont_return_target
= 1;
4513 else if (GET_CODE (target
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (target
))
4514 /* If this is a scalar in a register that is stored in a wider mode
4515 than the declared mode, compute the result into its declared mode
4516 and then convert to the wider mode. Our value is the computed
4519 rtx inner_target
= 0;
4521 /* If we don't want a value, we can do the conversion inside EXP,
4522 which will often result in some optimizations. Do the conversion
4523 in two steps: first change the signedness, if needed, then
4524 the extend. But don't do this if the type of EXP is a subtype
4525 of something else since then the conversion might involve
4526 more than just converting modes. */
4527 if ((want_value
& 1) == 0
4528 && INTEGRAL_TYPE_P (TREE_TYPE (exp
))
4529 && TREE_TYPE (TREE_TYPE (exp
)) == 0)
4531 if (TREE_UNSIGNED (TREE_TYPE (exp
))
4532 != SUBREG_PROMOTED_UNSIGNED_P (target
))
4534 ((*lang_hooks
.types
.signed_or_unsigned_type
)
4535 (SUBREG_PROMOTED_UNSIGNED_P (target
), TREE_TYPE (exp
)), exp
);
4537 exp
= convert ((*lang_hooks
.types
.type_for_mode
)
4538 (GET_MODE (SUBREG_REG (target
)),
4539 SUBREG_PROMOTED_UNSIGNED_P (target
)),
4542 inner_target
= SUBREG_REG (target
);
4545 temp
= expand_expr (exp
, inner_target
, VOIDmode
,
4546 want_value
& 2 ? EXPAND_STACK_PARM
: EXPAND_NORMAL
);
4548 /* If TEMP is a MEM and we want a result value, make the access
4549 now so it gets done only once. Strictly speaking, this is
4550 only necessary if the MEM is volatile, or if the address
4551 overlaps TARGET. But not performing the load twice also
4552 reduces the amount of rtl we generate and then have to CSE. */
4553 if (GET_CODE (temp
) == MEM
&& (want_value
& 1) != 0)
4554 temp
= copy_to_reg (temp
);
4556 /* If TEMP is a VOIDmode constant, use convert_modes to make
4557 sure that we properly convert it. */
4558 if (CONSTANT_P (temp
) && GET_MODE (temp
) == VOIDmode
)
4560 temp
= convert_modes (GET_MODE (target
), TYPE_MODE (TREE_TYPE (exp
)),
4561 temp
, SUBREG_PROMOTED_UNSIGNED_P (target
));
4562 temp
= convert_modes (GET_MODE (SUBREG_REG (target
)),
4563 GET_MODE (target
), temp
,
4564 SUBREG_PROMOTED_UNSIGNED_P (target
));
4567 convert_move (SUBREG_REG (target
), temp
,
4568 SUBREG_PROMOTED_UNSIGNED_P (target
));
4570 /* If we promoted a constant, change the mode back down to match
4571 target. Otherwise, the caller might get confused by a result whose
4572 mode is larger than expected. */
4574 if ((want_value
& 1) != 0 && GET_MODE (temp
) != GET_MODE (target
))
4576 if (GET_MODE (temp
) != VOIDmode
)
4578 temp
= gen_lowpart_SUBREG (GET_MODE (target
), temp
);
4579 SUBREG_PROMOTED_VAR_P (temp
) = 1;
4580 SUBREG_PROMOTED_UNSIGNED_SET (temp
,
4581 SUBREG_PROMOTED_UNSIGNED_P (target
));
4584 temp
= convert_modes (GET_MODE (target
),
4585 GET_MODE (SUBREG_REG (target
)),
4586 temp
, SUBREG_PROMOTED_UNSIGNED_P (target
));
4589 return want_value
& 1 ? temp
: NULL_RTX
;
4593 temp
= expand_expr (exp
, target
, GET_MODE (target
),
4594 want_value
& 2 ? EXPAND_STACK_PARM
: EXPAND_NORMAL
);
4595 /* Return TARGET if it's a specified hardware register.
4596 If TARGET is a volatile mem ref, either return TARGET
4597 or return a reg copied *from* TARGET; ANSI requires this.
4599 Otherwise, if TEMP is not TARGET, return TEMP
4600 if it is constant (for efficiency),
4601 or if we really want the correct value. */
4602 if (!(target
&& GET_CODE (target
) == REG
4603 && REGNO (target
) < FIRST_PSEUDO_REGISTER
)
4604 && !(GET_CODE (target
) == MEM
&& MEM_VOLATILE_P (target
))
4605 && ! rtx_equal_p (temp
, target
)
4606 && (CONSTANT_P (temp
) || (want_value
& 1) != 0))
4607 dont_return_target
= 1;
4610 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4611 the same as that of TARGET, adjust the constant. This is needed, for
4612 example, in case it is a CONST_DOUBLE and we want only a word-sized
4614 if (CONSTANT_P (temp
) && GET_MODE (temp
) == VOIDmode
4615 && TREE_CODE (exp
) != ERROR_MARK
4616 && GET_MODE (target
) != TYPE_MODE (TREE_TYPE (exp
)))
4617 temp
= convert_modes (GET_MODE (target
), TYPE_MODE (TREE_TYPE (exp
)),
4618 temp
, TREE_UNSIGNED (TREE_TYPE (exp
)));
4620 /* If value was not generated in the target, store it there.
4621 Convert the value to TARGET's type first if necessary.
4622 If TEMP and TARGET compare equal according to rtx_equal_p, but
4623 one or both of them are volatile memory refs, we have to distinguish
4625 - expand_expr has used TARGET. In this case, we must not generate
4626 another copy. This can be detected by TARGET being equal according
4628 - expand_expr has not used TARGET - that means that the source just
4629 happens to have the same RTX form. Since temp will have been created
4630 by expand_expr, it will compare unequal according to == .
4631 We must generate a copy in this case, to reach the correct number
4632 of volatile memory references. */
4634 if ((! rtx_equal_p (temp
, target
)
4635 || (temp
!= target
&& (side_effects_p (temp
)
4636 || side_effects_p (target
))))
4637 && TREE_CODE (exp
) != ERROR_MARK
4638 && ! dont_store_target
4639 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4640 but TARGET is not valid memory reference, TEMP will differ
4641 from TARGET although it is really the same location. */
4642 && (TREE_CODE_CLASS (TREE_CODE (exp
)) != 'd'
4643 || target
!= DECL_RTL_IF_SET (exp
))
4644 /* If there's nothing to copy, don't bother. Don't call expr_size
4645 unless necessary, because some front-ends (C++) expr_size-hook
4646 aborts on objects that are not supposed to be bit-copied or
4648 && expr_size (exp
) != const0_rtx
)
4650 target
= protect_from_queue (target
, 1);
4651 if (GET_MODE (temp
) != GET_MODE (target
)
4652 && GET_MODE (temp
) != VOIDmode
)
4654 int unsignedp
= TREE_UNSIGNED (TREE_TYPE (exp
));
4655 if (dont_return_target
)
4657 /* In this case, we will return TEMP,
4658 so make sure it has the proper mode.
4659 But don't forget to store the value into TARGET. */
4660 temp
= convert_to_mode (GET_MODE (target
), temp
, unsignedp
);
4661 emit_move_insn (target
, temp
);
4664 convert_move (target
, temp
, unsignedp
);
4667 else if (GET_MODE (temp
) == BLKmode
&& TREE_CODE (exp
) == STRING_CST
)
4669 /* Handle copying a string constant into an array. The string
4670 constant may be shorter than the array. So copy just the string's
4671 actual length, and clear the rest. First get the size of the data
4672 type of the string, which is actually the size of the target. */
4673 rtx size
= expr_size (exp
);
4675 if (GET_CODE (size
) == CONST_INT
4676 && INTVAL (size
) < TREE_STRING_LENGTH (exp
))
4677 emit_block_move (target
, temp
, size
,
4679 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
4682 /* Compute the size of the data to copy from the string. */
4684 = size_binop (MIN_EXPR
,
4685 make_tree (sizetype
, size
),
4686 size_int (TREE_STRING_LENGTH (exp
)));
4688 = expand_expr (copy_size
, NULL_RTX
, VOIDmode
,
4690 ? EXPAND_STACK_PARM
: EXPAND_NORMAL
));
4693 /* Copy that much. */
4694 copy_size_rtx
= convert_to_mode (ptr_mode
, copy_size_rtx
,
4695 TREE_UNSIGNED (sizetype
));
4696 emit_block_move (target
, temp
, copy_size_rtx
,
4698 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
4700 /* Figure out how much is left in TARGET that we have to clear.
4701 Do all calculations in ptr_mode. */
4702 if (GET_CODE (copy_size_rtx
) == CONST_INT
)
4704 size
= plus_constant (size
, -INTVAL (copy_size_rtx
));
4705 target
= adjust_address (target
, BLKmode
,
4706 INTVAL (copy_size_rtx
));
4710 size
= expand_binop (TYPE_MODE (sizetype
), sub_optab
, size
,
4711 copy_size_rtx
, NULL_RTX
, 0,
4714 #ifdef POINTERS_EXTEND_UNSIGNED
4715 if (GET_MODE (copy_size_rtx
) != Pmode
)
4716 copy_size_rtx
= convert_to_mode (Pmode
, copy_size_rtx
,
4717 TREE_UNSIGNED (sizetype
));
4720 target
= offset_address (target
, copy_size_rtx
,
4721 highest_pow2_factor (copy_size
));
4722 label
= gen_label_rtx ();
4723 emit_cmp_and_jump_insns (size
, const0_rtx
, LT
, NULL_RTX
,
4724 GET_MODE (size
), 0, label
);
4727 if (size
!= const0_rtx
)
4728 clear_storage (target
, size
);
4734 /* Handle calls that return values in multiple non-contiguous locations.
4735 The Irix 6 ABI has examples of this. */
4736 else if (GET_CODE (target
) == PARALLEL
)
4737 emit_group_load (target
, temp
, TREE_TYPE (exp
),
4738 int_size_in_bytes (TREE_TYPE (exp
)));
4739 else if (GET_MODE (temp
) == BLKmode
)
4740 emit_block_move (target
, temp
, expr_size (exp
),
4742 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
4744 emit_move_insn (target
, temp
);
  /* If we don't want a value, return NULL_RTX.  */
  if ((want_value & 1) == 0)
    return NULL_RTX;

  /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
     ??? The latter test doesn't seem to make sense.  */
  else if (dont_return_target && GET_CODE (temp) != MEM)
    return temp;

  /* Return TARGET itself if it is a hard register.  */
  else if ((want_value & 1) != 0
	   && GET_MODE (target) != BLKmode
	   && ! (GET_CODE (target) == REG
		 && REGNO (target) < FIRST_PSEUDO_REGISTER))
    return copy_to_reg (target);

  else
    return target;
}
/* Return 1 if EXP just contains zeros.  FIXME merge with initializer_zerop.  */

static int
is_zeros_p (tree exp)
{
  tree elt;

  switch (TREE_CODE (exp))
    {
    case CONVERT_EXPR:
    case NOP_EXPR:
    case NON_LVALUE_EXPR:
    case VIEW_CONVERT_EXPR:
      return is_zeros_p (TREE_OPERAND (exp, 0));

    case INTEGER_CST:
      return integer_zerop (exp);

    case COMPLEX_CST:
      return
	is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));

    case REAL_CST:
      return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);

    case VECTOR_CST:
      for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
	   elt = TREE_CHAIN (elt))
	if (!is_zeros_p (TREE_VALUE (elt)))
	  return 0;

      return 1;

    case CONSTRUCTOR:
      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
	return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
      for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
	if (! is_zeros_p (TREE_VALUE (elt)))
	  return 0;

      return 1;

    default:
      return 0;
    }
}
/* Return 1 if EXP contains mostly (3/4) zeros.  */

static int
mostly_zeros_p (tree exp)
{
  if (TREE_CODE (exp) == CONSTRUCTOR)
    {
      int elts = 0, zeros = 0;
      tree elt = CONSTRUCTOR_ELTS (exp);
      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
	{
	  /* If there are no ranges of true bits, it is all zero.  */
	  return elt == NULL_TREE;
	}
      for (; elt; elt = TREE_CHAIN (elt))
	{
	  /* We do not handle the case where the index is a RANGE_EXPR,
	     so the statistic will be somewhat inaccurate.
	     We do make a more accurate count in store_constructor itself,
	     so since this function is only used for nested array elements,
	     this should be close enough.  */
	  if (mostly_zeros_p (TREE_VALUE (elt)))
	    zeros++;
	  elts++;
	}

      return 4 * zeros >= 3 * elts;
    }

  return is_zeros_p (exp);
}
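
/* Illustrative sketch only (not part of GCC): the "mostly zero" test is the
   integer inequality 4 * zeros >= 3 * elts, i.e. at least three quarters of
   the counted elements are zero.  Three zeros out of four elements pass
   (12 >= 12); two out of four do not (8 < 12).  The helper name is
   hypothetical.  */
#if 0
static int
mostly_zeros_threshold_sketch (int zeros, int elts)
{
  /* Same predicate mostly_zeros_p applies after counting elements.  */
  return 4 * zeros >= 3 * elts;
}
#endif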
/* Helper function for store_constructor.
   TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
   TYPE is the type of the CONSTRUCTOR, not the element type.
   CLEARED is as for store_constructor.
   ALIAS_SET is the alias set to use for any stores.

   This provides a recursive shortcut back to store_constructor when it isn't
   necessary to go through store_field.  This is so that we can pass through
   the cleared field to let store_constructor know that we may not have to
   clear a substructure if the outer structure has already been cleared.  */

static void
store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
			 HOST_WIDE_INT bitpos, enum machine_mode mode,
			 tree exp, tree type, int cleared, int alias_set)
{
  if (TREE_CODE (exp) == CONSTRUCTOR
      && bitpos % BITS_PER_UNIT == 0
      /* If we have a nonzero bitpos for a register target, then we just
	 let store_field do the bitfield handling.  This is unlikely to
	 generate unnecessary clear instructions anyways.  */
      && (bitpos == 0 || GET_CODE (target) == MEM))
    {
      if (GET_CODE (target) == MEM)
	target
	  = adjust_address (target,
			    GET_MODE (target) == BLKmode
			    || 0 != (bitpos
				     % GET_MODE_ALIGNMENT (GET_MODE (target)))
			    ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);

      /* Update the alias set, if required.  */
      if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
	  && MEM_ALIAS_SET (target) != 0)
	{
	  target = copy_rtx (target);
	  set_mem_alias_set (target, alias_set);
	}

      store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
    }
  else
    store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
		 alias_set);
}
/* Store the value of constructor EXP into the rtx TARGET.
   TARGET is either a REG or a MEM; we know it cannot conflict, since
   safe_from_p has been called.
   CLEARED is true if TARGET is known to have been zero'd.
   SIZE is the number of bytes of TARGET we are allowed to modify: this
   may not be the same as the size of EXP if we are assigning to a field
   which has been packed to exclude padding bits.  */
4902 store_constructor (tree exp
, rtx target
, int cleared
, HOST_WIDE_INT size
)
4904 tree type
= TREE_TYPE (exp
);
4905 #ifdef WORD_REGISTER_OPERATIONS
4906 HOST_WIDE_INT exp_size
= int_size_in_bytes (type
);
4909 if (TREE_CODE (type
) == RECORD_TYPE
|| TREE_CODE (type
) == UNION_TYPE
4910 || TREE_CODE (type
) == QUAL_UNION_TYPE
)
4914 /* If size is zero or the target is already cleared, do nothing. */
4915 if (size
== 0 || cleared
)
4917 /* We either clear the aggregate or indicate the value is dead. */
4918 else if ((TREE_CODE (type
) == UNION_TYPE
4919 || TREE_CODE (type
) == QUAL_UNION_TYPE
)
4920 && ! CONSTRUCTOR_ELTS (exp
))
4921 /* If the constructor is empty, clear the union. */
4923 clear_storage (target
, expr_size (exp
));
4927 /* If we are building a static constructor into a register,
4928 set the initial value as zero so we can fold the value into
4929 a constant. But if more than one register is involved,
4930 this probably loses. */
4931 else if (GET_CODE (target
) == REG
&& TREE_STATIC (exp
)
4932 && GET_MODE_SIZE (GET_MODE (target
)) <= UNITS_PER_WORD
)
4934 emit_move_insn (target
, CONST0_RTX (GET_MODE (target
)));
4938 /* If the constructor has fewer fields than the structure
4939 or if we are initializing the structure to mostly zeros,
4940 clear the whole structure first. Don't do this if TARGET is a
4941 register whose mode size isn't equal to SIZE since clear_storage
4942 can't handle this case. */
4943 else if (((list_length (CONSTRUCTOR_ELTS (exp
)) != fields_length (type
))
4944 || mostly_zeros_p (exp
))
4945 && (GET_CODE (target
) != REG
4946 || ((HOST_WIDE_INT
) GET_MODE_SIZE (GET_MODE (target
))
4949 rtx xtarget
= target
;
4951 if (readonly_fields_p (type
))
4953 xtarget
= copy_rtx (xtarget
);
4954 RTX_UNCHANGING_P (xtarget
) = 1;
4957 clear_storage (xtarget
, GEN_INT (size
));
4962 emit_insn (gen_rtx_CLOBBER (VOIDmode
, target
));
4964 /* Store each element of the constructor into
4965 the corresponding field of TARGET. */
4967 for (elt
= CONSTRUCTOR_ELTS (exp
); elt
; elt
= TREE_CHAIN (elt
))
4969 tree field
= TREE_PURPOSE (elt
);
4970 tree value
= TREE_VALUE (elt
);
4971 enum machine_mode mode
;
4972 HOST_WIDE_INT bitsize
;
4973 HOST_WIDE_INT bitpos
= 0;
4975 rtx to_rtx
= target
;
4977 /* Just ignore missing fields.
4978 We cleared the whole structure, above,
4979 if any fields are missing. */
4983 if (cleared
&& is_zeros_p (value
))
4986 if (host_integerp (DECL_SIZE (field
), 1))
4987 bitsize
= tree_low_cst (DECL_SIZE (field
), 1);
4991 mode
= DECL_MODE (field
);
4992 if (DECL_BIT_FIELD (field
))
4995 offset
= DECL_FIELD_OFFSET (field
);
4996 if (host_integerp (offset
, 0)
4997 && host_integerp (bit_position (field
), 0))
4999 bitpos
= int_bit_position (field
);
5003 bitpos
= tree_low_cst (DECL_FIELD_BIT_OFFSET (field
), 0);
5009 if (CONTAINS_PLACEHOLDER_P (offset
))
5010 offset
= build (WITH_RECORD_EXPR
, sizetype
,
5011 offset
, make_tree (TREE_TYPE (exp
), target
));
5013 offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, 0);
5014 if (GET_CODE (to_rtx
) != MEM
)
5017 #ifdef POINTERS_EXTEND_UNSIGNED
5018 if (GET_MODE (offset_rtx
) != Pmode
)
5019 offset_rtx
= convert_to_mode (Pmode
, offset_rtx
, 0);
5021 if (GET_MODE (offset_rtx
) != ptr_mode
)
5022 offset_rtx
= convert_to_mode (ptr_mode
, offset_rtx
, 0);
5025 to_rtx
= offset_address (to_rtx
, offset_rtx
,
5026 highest_pow2_factor (offset
));
5029 if (TREE_READONLY (field
))
5031 if (GET_CODE (to_rtx
) == MEM
)
5032 to_rtx
= copy_rtx (to_rtx
);
5034 RTX_UNCHANGING_P (to_rtx
) = 1;
5037 #ifdef WORD_REGISTER_OPERATIONS
5038 /* If this initializes a field that is smaller than a word, at the
5039 start of a word, try to widen it to a full word.
5040 This special case allows us to output C++ member function
5041 initializations in a form that the optimizers can understand. */
5042 if (GET_CODE (target
) == REG
5043 && bitsize
< BITS_PER_WORD
5044 && bitpos
% BITS_PER_WORD
== 0
5045 && GET_MODE_CLASS (mode
) == MODE_INT
5046 && TREE_CODE (value
) == INTEGER_CST
5048 && bitpos
+ BITS_PER_WORD
<= exp_size
* BITS_PER_UNIT
)
5050 tree type
= TREE_TYPE (value
);
5052 if (TYPE_PRECISION (type
) < BITS_PER_WORD
)
5054 type
= (*lang_hooks
.types
.type_for_size
)
5055 (BITS_PER_WORD
, TREE_UNSIGNED (type
));
5056 value
= convert (type
, value
);
5059 if (BYTES_BIG_ENDIAN
)
5061 = fold (build (LSHIFT_EXPR
, type
, value
,
5062 build_int_2 (BITS_PER_WORD
- bitsize
, 0)));
5063 bitsize
= BITS_PER_WORD
;
5068 if (GET_CODE (to_rtx
) == MEM
&& !MEM_KEEP_ALIAS_SET_P (to_rtx
)
5069 && DECL_NONADDRESSABLE_P (field
))
5071 to_rtx
= copy_rtx (to_rtx
);
5072 MEM_KEEP_ALIAS_SET_P (to_rtx
) = 1;
5075 store_constructor_field (to_rtx
, bitsize
, bitpos
, mode
,
5076 value
, type
, cleared
,
5077 get_alias_set (TREE_TYPE (field
)));
5080 else if (TREE_CODE (type
) == ARRAY_TYPE
5081 || TREE_CODE (type
) == VECTOR_TYPE
)
5086 tree domain
= TYPE_DOMAIN (type
);
5087 tree elttype
= TREE_TYPE (type
);
5089 HOST_WIDE_INT minelt
= 0;
5090 HOST_WIDE_INT maxelt
= 0;
5092 /* Vectors are like arrays, but the domain is stored via an array
5094 if (TREE_CODE (type
) == VECTOR_TYPE
)
5096 /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
5097 the same field as TYPE_DOMAIN, we are not guaranteed that
5099 domain
= TYPE_DEBUG_REPRESENTATION_TYPE (type
);
5100 domain
= TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain
)));
5103 const_bounds_p
= (TYPE_MIN_VALUE (domain
)
5104 && TYPE_MAX_VALUE (domain
)
5105 && host_integerp (TYPE_MIN_VALUE (domain
), 0)
5106 && host_integerp (TYPE_MAX_VALUE (domain
), 0));
5108 /* If we have constant bounds for the range of the type, get them. */
5111 minelt
= tree_low_cst (TYPE_MIN_VALUE (domain
), 0);
5112 maxelt
= tree_low_cst (TYPE_MAX_VALUE (domain
), 0);
5115 /* If the constructor has fewer elements than the array,
5116 clear the whole array first. Similarly if this is
5117 static constructor of a non-BLKmode object. */
5118 if (cleared
|| (GET_CODE (target
) == REG
&& TREE_STATIC (exp
)))
5122 HOST_WIDE_INT count
= 0, zero_count
= 0;
5123 need_to_clear
= ! const_bounds_p
;
5125 /* This loop is a more accurate version of the loop in
5126 mostly_zeros_p (it handles RANGE_EXPR in an index).
5127 It is also needed to check for missing elements. */
5128 for (elt
= CONSTRUCTOR_ELTS (exp
);
5129 elt
!= NULL_TREE
&& ! need_to_clear
;
5130 elt
= TREE_CHAIN (elt
))
5132 tree index
= TREE_PURPOSE (elt
);
5133 HOST_WIDE_INT this_node_count
;
5135 if (index
!= NULL_TREE
&& TREE_CODE (index
) == RANGE_EXPR
)
5137 tree lo_index
= TREE_OPERAND (index
, 0);
5138 tree hi_index
= TREE_OPERAND (index
, 1);
5140 if (! host_integerp (lo_index
, 1)
5141 || ! host_integerp (hi_index
, 1))
5147 this_node_count
= (tree_low_cst (hi_index
, 1)
5148 - tree_low_cst (lo_index
, 1) + 1);
5151 this_node_count
= 1;
5153 count
+= this_node_count
;
5154 if (mostly_zeros_p (TREE_VALUE (elt
)))
5155 zero_count
+= this_node_count
;
5158 /* Clear the entire array first if there are any missing elements,
5159 or if the incidence of zero elements is >= 75%. */
5161 && (count
< maxelt
- minelt
+ 1 || 4 * zero_count
>= 3 * count
))
5165 if (need_to_clear
&& size
> 0)
5170 emit_move_insn (target
, CONST0_RTX (GET_MODE (target
)));
5172 clear_storage (target
, GEN_INT (size
));
5176 else if (REG_P (target
))
5177 /* Inform later passes that the old value is dead. */
5178 emit_insn (gen_rtx_CLOBBER (VOIDmode
, target
));
5180 /* Store each element of the constructor into
5181 the corresponding element of TARGET, determined
5182 by counting the elements. */
5183 for (elt
= CONSTRUCTOR_ELTS (exp
), i
= 0;
5185 elt
= TREE_CHAIN (elt
), i
++)
5187 enum machine_mode mode
;
5188 HOST_WIDE_INT bitsize
;
5189 HOST_WIDE_INT bitpos
;
5191 tree value
= TREE_VALUE (elt
);
5192 tree index
= TREE_PURPOSE (elt
);
5193 rtx xtarget
= target
;
5195 if (cleared
&& is_zeros_p (value
))
5198 unsignedp
= TREE_UNSIGNED (elttype
);
5199 mode
= TYPE_MODE (elttype
);
5200 if (mode
== BLKmode
)
5201 bitsize
= (host_integerp (TYPE_SIZE (elttype
), 1)
5202 ? tree_low_cst (TYPE_SIZE (elttype
), 1)
5205 bitsize
= GET_MODE_BITSIZE (mode
);
5207 if (index
!= NULL_TREE
&& TREE_CODE (index
) == RANGE_EXPR
)
5209 tree lo_index
= TREE_OPERAND (index
, 0);
5210 tree hi_index
= TREE_OPERAND (index
, 1);
5211 rtx index_r
, pos_rtx
, loop_end
;
5212 struct nesting
*loop
;
5213 HOST_WIDE_INT lo
, hi
, count
;
5216 /* If the range is constant and "small", unroll the loop. */
5218 && host_integerp (lo_index
, 0)
5219 && host_integerp (hi_index
, 0)
5220 && (lo
= tree_low_cst (lo_index
, 0),
5221 hi
= tree_low_cst (hi_index
, 0),
5222 count
= hi
- lo
+ 1,
5223 (GET_CODE (target
) != MEM
5225 || (host_integerp (TYPE_SIZE (elttype
), 1)
5226 && (tree_low_cst (TYPE_SIZE (elttype
), 1) * count
5229 lo
-= minelt
; hi
-= minelt
;
5230 for (; lo
<= hi
; lo
++)
5232 bitpos
= lo
* tree_low_cst (TYPE_SIZE (elttype
), 0);
5234 if (GET_CODE (target
) == MEM
5235 && !MEM_KEEP_ALIAS_SET_P (target
)
5236 && TREE_CODE (type
) == ARRAY_TYPE
5237 && TYPE_NONALIASED_COMPONENT (type
))
5239 target
= copy_rtx (target
);
5240 MEM_KEEP_ALIAS_SET_P (target
) = 1;
5243 store_constructor_field
5244 (target
, bitsize
, bitpos
, mode
, value
, type
, cleared
,
5245 get_alias_set (elttype
));
5250 expand_expr (hi_index
, NULL_RTX
, VOIDmode
, 0);
5251 loop_end
= gen_label_rtx ();
5253 unsignedp
= TREE_UNSIGNED (domain
);
5255 index
= build_decl (VAR_DECL
, NULL_TREE
, domain
);
5258 = gen_reg_rtx (promote_mode (domain
, DECL_MODE (index
),
5260 SET_DECL_RTL (index
, index_r
);
5261 if (TREE_CODE (value
) == SAVE_EXPR
5262 && SAVE_EXPR_RTL (value
) == 0)
5264 /* Make sure value gets expanded once before the
5266 expand_expr (value
, const0_rtx
, VOIDmode
, 0);
5269 store_expr (lo_index
, index_r
, 0);
5270 loop
= expand_start_loop (0);
5272 /* Assign value to element index. */
5274 = convert (ssizetype
,
5275 fold (build (MINUS_EXPR
, TREE_TYPE (index
),
5276 index
, TYPE_MIN_VALUE (domain
))));
5277 position
= size_binop (MULT_EXPR
, position
,
5279 TYPE_SIZE_UNIT (elttype
)));
5281 pos_rtx
= expand_expr (position
, 0, VOIDmode
, 0);
5282 xtarget
= offset_address (target
, pos_rtx
,
5283 highest_pow2_factor (position
));
5284 xtarget
= adjust_address (xtarget
, mode
, 0);
5285 if (TREE_CODE (value
) == CONSTRUCTOR
)
5286 store_constructor (value
, xtarget
, cleared
,
5287 bitsize
/ BITS_PER_UNIT
);
5289 store_expr (value
, xtarget
, 0);
5291 expand_exit_loop_if_false (loop
,
5292 build (LT_EXPR
, integer_type_node
,
5295 expand_increment (build (PREINCREMENT_EXPR
,
5297 index
, integer_one_node
), 0, 0);
5299 emit_label (loop_end
);
5302 else if ((index
!= 0 && ! host_integerp (index
, 0))
5303 || ! host_integerp (TYPE_SIZE (elttype
), 1))
5308 index
= ssize_int (1);
5311 index
= convert (ssizetype
,
5312 fold (build (MINUS_EXPR
, index
,
5313 TYPE_MIN_VALUE (domain
))));
5315 position
= size_binop (MULT_EXPR
, index
,
5317 TYPE_SIZE_UNIT (elttype
)));
5318 xtarget
= offset_address (target
,
5319 expand_expr (position
, 0, VOIDmode
, 0),
5320 highest_pow2_factor (position
));
5321 xtarget
= adjust_address (xtarget
, mode
, 0);
5322 store_expr (value
, xtarget
, 0);
5327 bitpos
= ((tree_low_cst (index
, 0) - minelt
)
5328 * tree_low_cst (TYPE_SIZE (elttype
), 1));
5330 bitpos
= (i
* tree_low_cst (TYPE_SIZE (elttype
), 1));
5332 if (GET_CODE (target
) == MEM
&& !MEM_KEEP_ALIAS_SET_P (target
)
5333 && TREE_CODE (type
) == ARRAY_TYPE
5334 && TYPE_NONALIASED_COMPONENT (type
))
5336 target
= copy_rtx (target
);
5337 MEM_KEEP_ALIAS_SET_P (target
) = 1;
5340 store_constructor_field (target
, bitsize
, bitpos
, mode
, value
,
5341 type
, cleared
, get_alias_set (elttype
));
5347 /* Set constructor assignments. */
5348 else if (TREE_CODE (type
) == SET_TYPE
)
5350 tree elt
= CONSTRUCTOR_ELTS (exp
);
5351 unsigned HOST_WIDE_INT nbytes
= int_size_in_bytes (type
), nbits
;
5352 tree domain
= TYPE_DOMAIN (type
);
5353 tree domain_min
, domain_max
, bitlength
;
5355 /* The default implementation strategy is to extract the constant
5356 parts of the constructor, use that to initialize the target,
5357 and then "or" in whatever non-constant ranges we need in addition.
5359 If a large set is all zero or all ones, it is
5360 probably better to set it using memset (if available) or bzero.
5361 Also, if a large set has just a single range, it may also be
5362 better to first clear all the first clear the set (using
5363 bzero/memset), and set the bits we want. */
5365 /* Check for all zeros. */
5366 if (elt
== NULL_TREE
&& size
> 0)
5369 clear_storage (target
, GEN_INT (size
));
5373 domain_min
= convert (sizetype
, TYPE_MIN_VALUE (domain
));
5374 domain_max
= convert (sizetype
, TYPE_MAX_VALUE (domain
));
5375 bitlength
= size_binop (PLUS_EXPR
,
5376 size_diffop (domain_max
, domain_min
),
5379 nbits
= tree_low_cst (bitlength
, 1);
5381 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
5382 are "complicated" (more than one range), initialize (the
5383 constant parts) by copying from a constant. */
5384 if (GET_MODE (target
) != BLKmode
|| nbits
<= 2 * BITS_PER_WORD
5385 || (nbytes
<= 32 && TREE_CHAIN (elt
) != NULL_TREE
))
5387 unsigned int set_word_size
= TYPE_ALIGN (TREE_TYPE (exp
));
5388 enum machine_mode mode
= mode_for_size (set_word_size
, MODE_INT
, 1);
5389 char *bit_buffer
= alloca (nbits
);
5390 HOST_WIDE_INT word
= 0;
5391 unsigned int bit_pos
= 0;
5392 unsigned int ibit
= 0;
5393 unsigned int offset
= 0; /* In bytes from beginning of set. */
5395 elt
= get_set_constructor_bits (exp
, bit_buffer
, nbits
);
5398 if (bit_buffer
[ibit
])
5400 if (BYTES_BIG_ENDIAN
)
5401 word
|= (1 << (set_word_size
- 1 - bit_pos
));
5403 word
|= 1 << bit_pos
;
5407 if (bit_pos
>= set_word_size
|| ibit
== nbits
)
5409 if (word
!= 0 || ! cleared
)
5411 rtx datum
= GEN_INT (word
);
5414 /* The assumption here is that it is safe to use
5415 XEXP if the set is multi-word, but not if
5416 it's single-word. */
5417 if (GET_CODE (target
) == MEM
)
5418 to_rtx
= adjust_address (target
, mode
, offset
);
5419 else if (offset
== 0)
5423 emit_move_insn (to_rtx
, datum
);
5430 offset
+= set_word_size
/ BITS_PER_UNIT
;
5435 /* Don't bother clearing storage if the set is all ones. */
5436 if (TREE_CHAIN (elt
) != NULL_TREE
5437 || (TREE_PURPOSE (elt
) == NULL_TREE
5439 : ( ! host_integerp (TREE_VALUE (elt
), 0)
5440 || ! host_integerp (TREE_PURPOSE (elt
), 0)
5441 || (tree_low_cst (TREE_VALUE (elt
), 0)
5442 - tree_low_cst (TREE_PURPOSE (elt
), 0) + 1
5443 != (HOST_WIDE_INT
) nbits
))))
5444 clear_storage (target
, expr_size (exp
));
5446 for (; elt
!= NULL_TREE
; elt
= TREE_CHAIN (elt
))
5448 /* Start of range of element or NULL. */
5449 tree startbit
= TREE_PURPOSE (elt
);
5450 /* End of range of element, or element value. */
5451 tree endbit
= TREE_VALUE (elt
);
5452 HOST_WIDE_INT startb
, endb
;
5453 rtx bitlength_rtx
, startbit_rtx
, endbit_rtx
, targetx
;
5455 bitlength_rtx
= expand_expr (bitlength
,
5456 NULL_RTX
, MEM
, EXPAND_CONST_ADDRESS
);
5458 /* Handle non-range tuple element like [ expr ]. */
5459 if (startbit
== NULL_TREE
)
5461 startbit
= save_expr (endbit
);
5465 startbit
= convert (sizetype
, startbit
);
5466 endbit
= convert (sizetype
, endbit
);
5467 if (! integer_zerop (domain_min
))
5469 startbit
= size_binop (MINUS_EXPR
, startbit
, domain_min
);
5470 endbit
= size_binop (MINUS_EXPR
, endbit
, domain_min
);
5472 startbit_rtx
= expand_expr (startbit
, NULL_RTX
, MEM
,
5473 EXPAND_CONST_ADDRESS
);
5474 endbit_rtx
= expand_expr (endbit
, NULL_RTX
, MEM
,
5475 EXPAND_CONST_ADDRESS
);
5481 ((build_qualified_type ((*lang_hooks
.types
.type_for_mode
)
5482 (GET_MODE (target
), 0),
5485 emit_move_insn (targetx
, target
);
5488 else if (GET_CODE (target
) == MEM
)
5493 /* Optimization: If startbit and endbit are constants divisible
5494 by BITS_PER_UNIT, call memset instead. */
5495 if (TARGET_MEM_FUNCTIONS
5496 && TREE_CODE (startbit
) == INTEGER_CST
5497 && TREE_CODE (endbit
) == INTEGER_CST
5498 && (startb
= TREE_INT_CST_LOW (startbit
)) % BITS_PER_UNIT
== 0
5499 && (endb
= TREE_INT_CST_LOW (endbit
) + 1) % BITS_PER_UNIT
== 0)
5501 emit_library_call (memset_libfunc
, LCT_NORMAL
,
5503 plus_constant (XEXP (targetx
, 0),
5504 startb
/ BITS_PER_UNIT
),
5506 constm1_rtx
, TYPE_MODE (integer_type_node
),
5507 GEN_INT ((endb
- startb
) / BITS_PER_UNIT
),
5508 TYPE_MODE (sizetype
));
5511 emit_library_call (setbits_libfunc
, LCT_NORMAL
,
5512 VOIDmode
, 4, XEXP (targetx
, 0),
5513 Pmode
, bitlength_rtx
, TYPE_MODE (sizetype
),
5514 startbit_rtx
, TYPE_MODE (sizetype
),
5515 endbit_rtx
, TYPE_MODE (sizetype
));
5518 emit_move_insn (target
, targetx
);
/* Store the value of EXP (an expression tree)
   into a subfield of TARGET which has mode MODE and occupies
   BITSIZE bits, starting BITPOS bits from the start of TARGET.
   If MODE is VOIDmode, it means that we are storing into a bit-field.

   If VALUE_MODE is VOIDmode, return nothing in particular.
   UNSIGNEDP is not used in this case.

   Otherwise, return an rtx for the value stored.  This rtx
   has mode VALUE_MODE if that is convenient to do.
   In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.

   TYPE is the type of the underlying object,

   ALIAS_SET is the alias set for the destination.  This value will
   (in general) be different from that for TARGET, since TARGET is a
   reference to the containing structure.  */
5545 store_field (rtx target
, HOST_WIDE_INT bitsize
, HOST_WIDE_INT bitpos
,
5546 enum machine_mode mode
, tree exp
, enum machine_mode value_mode
,
5547 int unsignedp
, tree type
, int alias_set
)
5549 HOST_WIDE_INT width_mask
= 0;
5551 if (TREE_CODE (exp
) == ERROR_MARK
)
5554 /* If we have nothing to store, do nothing unless the expression has
5557 return expand_expr (exp
, const0_rtx
, VOIDmode
, 0);
5558 else if (bitsize
>= 0 && bitsize
< HOST_BITS_PER_WIDE_INT
)
5559 width_mask
= ((HOST_WIDE_INT
) 1 << bitsize
) - 1;
5561 /* If we are storing into an unaligned field of an aligned union that is
5562 in a register, we may have the mode of TARGET being an integer mode but
5563 MODE == BLKmode. In that case, get an aligned object whose size and
5564 alignment are the same as TARGET and store TARGET into it (we can avoid
5565 the store if the field being stored is the entire width of TARGET). Then
5566 call ourselves recursively to store the field into a BLKmode version of
5567 that object. Finally, load from the object into TARGET. This is not
5568 very efficient in general, but should only be slightly more expensive
5569 than the otherwise-required unaligned accesses. Perhaps this can be
5570 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5571 twice, once with emit_move_insn and once via store_field. */
5574 && (GET_CODE (target
) == REG
|| GET_CODE (target
) == SUBREG
))
5576 rtx object
= assign_temp (type
, 0, 1, 1);
5577 rtx blk_object
= adjust_address (object
, BLKmode
, 0);
5579 if (bitsize
!= (HOST_WIDE_INT
) GET_MODE_BITSIZE (GET_MODE (target
)))
5580 emit_move_insn (object
, target
);
5582 store_field (blk_object
, bitsize
, bitpos
, mode
, exp
, VOIDmode
, 0, type
,
5585 emit_move_insn (target
, object
);
5587 /* We want to return the BLKmode version of the data. */
5591 if (GET_CODE (target
) == CONCAT
)
5593 /* We're storing into a struct containing a single __complex. */
5597 return store_expr (exp
, target
, 0);
5600 /* If the structure is in a register or if the component
5601 is a bit field, we cannot use addressing to access it.
5602 Use bit-field techniques or SUBREG to store in it. */
5604 if (mode
== VOIDmode
5605 || (mode
!= BLKmode
&& ! direct_store
[(int) mode
]
5606 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_INT
5607 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_FLOAT
)
5608 || GET_CODE (target
) == REG
5609 || GET_CODE (target
) == SUBREG
5610 /* If the field isn't aligned enough to store as an ordinary memref,
5611 store it as a bit field. */
5613 && ((((MEM_ALIGN (target
) < GET_MODE_ALIGNMENT (mode
))
5614 || bitpos
% GET_MODE_ALIGNMENT (mode
))
5615 && SLOW_UNALIGNED_ACCESS (mode
, MEM_ALIGN (target
)))
5616 || (bitpos
% BITS_PER_UNIT
!= 0)))
5617 /* If the RHS and field are a constant size and the size of the
5618 RHS isn't the same size as the bitfield, we must use bitfield
5621 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
))) == INTEGER_CST
5622 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp
)), bitsize
) != 0))
5624 rtx temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, 0);
5626 /* If BITSIZE is narrower than the size of the type of EXP
5627 we will be narrowing TEMP. Normally, what's wanted are the
5628 low-order bits. However, if EXP's type is a record and this is
5629 big-endian machine, we want the upper BITSIZE bits. */
5630 if (BYTES_BIG_ENDIAN
&& GET_MODE_CLASS (GET_MODE (temp
)) == MODE_INT
5631 && bitsize
< (HOST_WIDE_INT
) GET_MODE_BITSIZE (GET_MODE (temp
))
5632 && TREE_CODE (TREE_TYPE (exp
)) == RECORD_TYPE
)
5633 temp
= expand_shift (RSHIFT_EXPR
, GET_MODE (temp
), temp
,
5634 size_int (GET_MODE_BITSIZE (GET_MODE (temp
))
5638 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5640 if (mode
!= VOIDmode
&& mode
!= BLKmode
5641 && mode
!= TYPE_MODE (TREE_TYPE (exp
)))
5642 temp
= convert_modes (mode
, TYPE_MODE (TREE_TYPE (exp
)), temp
, 1);
5644 /* If the modes of TARGET and TEMP are both BLKmode, both
5645 must be in memory and BITPOS must be aligned on a byte
5646 boundary. If so, we simply do a block copy. */
5647 if (GET_MODE (target
) == BLKmode
&& GET_MODE (temp
) == BLKmode
)
5649 if (GET_CODE (target
) != MEM
|| GET_CODE (temp
) != MEM
5650 || bitpos
% BITS_PER_UNIT
!= 0)
5653 target
= adjust_address (target
, VOIDmode
, bitpos
/ BITS_PER_UNIT
);
5654 emit_block_move (target
, temp
,
5655 GEN_INT ((bitsize
+ BITS_PER_UNIT
- 1)
5659 return value_mode
== VOIDmode
? const0_rtx
: target
;
5662 /* Store the value in the bitfield. */
5663 store_bit_field (target
, bitsize
, bitpos
, mode
, temp
,
5664 int_size_in_bytes (type
));
5666 if (value_mode
!= VOIDmode
)
5668 /* The caller wants an rtx for the value.
5669 If possible, avoid refetching from the bitfield itself. */
5671 && ! (GET_CODE (target
) == MEM
&& MEM_VOLATILE_P (target
)))
5674 enum machine_mode tmode
;
5676 tmode
= GET_MODE (temp
);
5677 if (tmode
== VOIDmode
)
5681 return expand_and (tmode
, temp
,
5682 gen_int_mode (width_mask
, tmode
),
5685 count
= build_int_2 (GET_MODE_BITSIZE (tmode
) - bitsize
, 0);
5686 temp
= expand_shift (LSHIFT_EXPR
, tmode
, temp
, count
, 0, 0);
5687 return expand_shift (RSHIFT_EXPR
, tmode
, temp
, count
, 0, 0);
5690 return extract_bit_field (target
, bitsize
, bitpos
, unsignedp
,
5691 NULL_RTX
, value_mode
, VOIDmode
,
5692 int_size_in_bytes (type
));
5698 rtx addr
= XEXP (target
, 0);
5699 rtx to_rtx
= target
;
5701 /* If a value is wanted, it must be the lhs;
5702 so make the address stable for multiple use. */
5704 if (value_mode
!= VOIDmode
&& GET_CODE (addr
) != REG
5705 && ! CONSTANT_ADDRESS_P (addr
)
5706 /* A frame-pointer reference is already stable. */
5707 && ! (GET_CODE (addr
) == PLUS
5708 && GET_CODE (XEXP (addr
, 1)) == CONST_INT
5709 && (XEXP (addr
, 0) == virtual_incoming_args_rtx
5710 || XEXP (addr
, 0) == virtual_stack_vars_rtx
)))
5711 to_rtx
= replace_equiv_address (to_rtx
, copy_to_reg (addr
));
5713 /* Now build a reference to just the desired component. */
5715 to_rtx
= adjust_address (target
, mode
, bitpos
/ BITS_PER_UNIT
);
5717 if (to_rtx
== target
)
5718 to_rtx
= copy_rtx (to_rtx
);
5720 MEM_SET_IN_STRUCT_P (to_rtx
, 1);
5721 if (!MEM_KEEP_ALIAS_SET_P (to_rtx
) && MEM_ALIAS_SET (to_rtx
) != 0)
5722 set_mem_alias_set (to_rtx
, alias_set
);
5724 return store_expr (exp
, to_rtx
, value_mode
!= VOIDmode
);
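
/* Illustrative sketch only (not part of GCC): storing into a 5-bit field at
   bit offset 3 of TO_RTX, roughly the arguments expand_assignment derives
   from get_inner_reference.  The helper name and the concrete numbers are
   hypothetical.  */
#if 0
static rtx
store_field_usage_sketch (rtx to_rtx, tree from, tree struct_type)
{
  /* MODE == VOIDmode selects the bit-field path; VALUE_MODE == word_mode
     asks for the stored value back in word_mode.  */
  return store_field (to_rtx, /* bitsize */ 5, /* bitpos */ 3, VOIDmode,
		      from, word_mode, /* unsignedp */ 1, struct_type,
		      get_alias_set (struct_type));
}
#endif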
/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
   an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
   codes and find the ultimate containing object, which we return.

   We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
   bit position, and *PUNSIGNEDP to the signedness of the field.
   If the position of the field is variable, we store a tree
   giving the variable offset (in units) in *POFFSET.
   This offset is in addition to the bit position.
   If the position is not variable, we store 0 in *POFFSET.

   If any of the extraction expressions is volatile,
   we store 1 in *PVOLATILEP.  Otherwise we don't change that.

   If the field is a bit-field, *PMODE is set to VOIDmode.  Otherwise, it
   is a mode that can be used to access the field.  In that case, *PBITSIZE
   is redundant.

   If the field describes a variable-sized object, *PMODE is set to
   VOIDmode and *PBITSIZE is set to -1.  An access cannot be made in
   this case, but the address of the object can be found.  */
5751 get_inner_reference (tree exp
, HOST_WIDE_INT
*pbitsize
,
5752 HOST_WIDE_INT
*pbitpos
, tree
*poffset
,
5753 enum machine_mode
*pmode
, int *punsignedp
,
5757 enum machine_mode mode
= VOIDmode
;
5758 tree offset
= size_zero_node
;
5759 tree bit_offset
= bitsize_zero_node
;
5760 tree placeholder_ptr
= 0;
5763 /* First get the mode, signedness, and size. We do this from just the
5764 outermost expression. */
5765 if (TREE_CODE (exp
) == COMPONENT_REF
)
5767 size_tree
= DECL_SIZE (TREE_OPERAND (exp
, 1));
5768 if (! DECL_BIT_FIELD (TREE_OPERAND (exp
, 1)))
5769 mode
= DECL_MODE (TREE_OPERAND (exp
, 1));
5771 *punsignedp
= TREE_UNSIGNED (TREE_OPERAND (exp
, 1));
5773 else if (TREE_CODE (exp
) == BIT_FIELD_REF
)
5775 size_tree
= TREE_OPERAND (exp
, 1);
5776 *punsignedp
= TREE_UNSIGNED (exp
);
5780 mode
= TYPE_MODE (TREE_TYPE (exp
));
5781 *punsignedp
= TREE_UNSIGNED (TREE_TYPE (exp
));
5783 if (mode
== BLKmode
)
5784 size_tree
= TYPE_SIZE (TREE_TYPE (exp
));
5786 *pbitsize
= GET_MODE_BITSIZE (mode
);
5791 if (! host_integerp (size_tree
, 1))
5792 mode
= BLKmode
, *pbitsize
= -1;
5794 *pbitsize
= tree_low_cst (size_tree
, 1);
5797 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5798 and find the ultimate containing object. */
5801 if (TREE_CODE (exp
) == BIT_FIELD_REF
)
5802 bit_offset
= size_binop (PLUS_EXPR
, bit_offset
, TREE_OPERAND (exp
, 2));
5803 else if (TREE_CODE (exp
) == COMPONENT_REF
)
5805 tree field
= TREE_OPERAND (exp
, 1);
5806 tree this_offset
= DECL_FIELD_OFFSET (field
);
5808 /* If this field hasn't been filled in yet, don't go
5809 past it. This should only happen when folding expressions
5810 made during type construction. */
5811 if (this_offset
== 0)
5813 else if (CONTAINS_PLACEHOLDER_P (this_offset
))
5814 this_offset
= build (WITH_RECORD_EXPR
, sizetype
, this_offset
, exp
);
5816 offset
= size_binop (PLUS_EXPR
, offset
, this_offset
);
5817 bit_offset
= size_binop (PLUS_EXPR
, bit_offset
,
5818 DECL_FIELD_BIT_OFFSET (field
));
5820 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5823 else if (TREE_CODE (exp
) == ARRAY_REF
5824 || TREE_CODE (exp
) == ARRAY_RANGE_REF
)
5826 tree index
= TREE_OPERAND (exp
, 1);
5827 tree array
= TREE_OPERAND (exp
, 0);
5828 tree domain
= TYPE_DOMAIN (TREE_TYPE (array
));
5829 tree low_bound
= (domain
? TYPE_MIN_VALUE (domain
) : 0);
5830 tree unit_size
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array
)));
5832 /* We assume all arrays have sizes that are a multiple of a byte.
5833 First subtract the lower bound, if any, in the type of the
5834 index, then convert to sizetype and multiply by the size of the
5836 if (low_bound
!= 0 && ! integer_zerop (low_bound
))
5837 index
= fold (build (MINUS_EXPR
, TREE_TYPE (index
),
5840 /* If the index has a self-referential type, pass it to a
5841 WITH_RECORD_EXPR; if the component size is, pass our
5842 component to one. */
5843 if (CONTAINS_PLACEHOLDER_P (index
))
5844 index
= build (WITH_RECORD_EXPR
, TREE_TYPE (index
), index
, exp
);
5845 if (CONTAINS_PLACEHOLDER_P (unit_size
))
5846 unit_size
= build (WITH_RECORD_EXPR
, sizetype
, unit_size
, array
);
5848 offset
= size_binop (PLUS_EXPR
, offset
,
5849 size_binop (MULT_EXPR
,
5850 convert (sizetype
, index
),
5854 else if (TREE_CODE (exp
) == PLACEHOLDER_EXPR
)
5856 tree
new = find_placeholder (exp
, &placeholder_ptr
);
5858 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5859 We might have been called from tree optimization where we
5860 haven't set up an object yet. */
5869 /* We can go inside most conversions: all NON_VALUE_EXPRs, all normal
5870 conversions that don't change the mode, and all view conversions
5871 except those that need to "step up" the alignment. */
5872 else if (TREE_CODE (exp
) != NON_LVALUE_EXPR
5873 && ! (TREE_CODE (exp
) == VIEW_CONVERT_EXPR
5874 && ! ((TYPE_ALIGN (TREE_TYPE (exp
))
5875 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp
, 0))))
5877 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp
, 0)))
5878 < BIGGEST_ALIGNMENT
)
5879 && (TYPE_ALIGN_OK (TREE_TYPE (exp
))
5880 || TYPE_ALIGN_OK (TREE_TYPE
5881 (TREE_OPERAND (exp
, 0))))))
5882 && ! ((TREE_CODE (exp
) == NOP_EXPR
5883 || TREE_CODE (exp
) == CONVERT_EXPR
)
5884 && (TYPE_MODE (TREE_TYPE (exp
))
5885 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))))
5888 /* If any reference in the chain is volatile, the effect is volatile. */
5889 if (TREE_THIS_VOLATILE (exp
))
5892 exp
= TREE_OPERAND (exp
, 0);
5895 /* If OFFSET is constant, see if we can return the whole thing as a
5896 constant bit position. Otherwise, split it up. */
5897 if (host_integerp (offset
, 0)
5898 && 0 != (tem
= size_binop (MULT_EXPR
, convert (bitsizetype
, offset
),
5900 && 0 != (tem
= size_binop (PLUS_EXPR
, tem
, bit_offset
))
5901 && host_integerp (tem
, 0))
5902 *pbitpos
= tree_low_cst (tem
, 0), *poffset
= 0;
5904 *pbitpos
= tree_low_cst (bit_offset
, 0), *poffset
= offset
;
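
/* Illustrative sketch only (not part of GCC): typical use of
   get_inner_reference, mirroring expand_assignment above.  The helper name
   is hypothetical.  */
#if 0
static void
get_inner_reference_usage_sketch (tree ref)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset, inner;
  enum machine_mode mode;
  int unsignedp, volatilep = 0;

  /* INNER is the ultimate containing object; BITSIZE/BITPOS locate the
     referenced bits within it, and OFFSET holds any variable part of the
     position (in units).  */
  inner = get_inner_reference (ref, &bitsize, &bitpos, &offset, &mode,
			       &unsignedp, &volatilep);
  (void) inner;
}
#endif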
/* Return 1 if T is an expression that get_inner_reference handles.  */

int
handled_component_p (tree t)
{
  switch (TREE_CODE (t))
    {
    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case NON_LVALUE_EXPR:
    case VIEW_CONVERT_EXPR:
      return 1;

    /* ??? Sure they are handled, but get_inner_reference may return
       a different PBITSIZE, depending upon whether the expression is
       wrapped up in a NOP_EXPR or not, e.g. for bitfields.  */
    case NOP_EXPR:
    case CONVERT_EXPR:
      return (TYPE_MODE (TREE_TYPE (t))
	      == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));

    default:
      return 0;
    }
}
/* Given an rtx VALUE that may contain additions and multiplications, return
   an equivalent value that just refers to a register, memory, or constant.
   This is done by generating instructions to perform the arithmetic and
   returning a pseudo-register containing the value.

   The returned value may be a REG, SUBREG, MEM or constant.  */
5946 force_operand (rtx value
, rtx target
)
5949 /* Use subtarget as the target for operand 0 of a binary operation. */
5950 rtx subtarget
= get_subtarget (target
);
5951 enum rtx_code code
= GET_CODE (value
);
5953 /* Check for a PIC address load. */
5954 if ((code
== PLUS
|| code
== MINUS
)
5955 && XEXP (value
, 0) == pic_offset_table_rtx
5956 && (GET_CODE (XEXP (value
, 1)) == SYMBOL_REF
5957 || GET_CODE (XEXP (value
, 1)) == LABEL_REF
5958 || GET_CODE (XEXP (value
, 1)) == CONST
))
5961 subtarget
= gen_reg_rtx (GET_MODE (value
));
5962 emit_move_insn (subtarget
, value
);
5966 if (code
== ZERO_EXTEND
|| code
== SIGN_EXTEND
)
5969 target
= gen_reg_rtx (GET_MODE (value
));
5970 convert_move (target
, force_operand (XEXP (value
, 0), NULL
),
5971 code
== ZERO_EXTEND
);
5975 if (GET_RTX_CLASS (code
) == '2' || GET_RTX_CLASS (code
) == 'c')
5977 op2
= XEXP (value
, 1);
5978 if (!CONSTANT_P (op2
) && !(GET_CODE (op2
) == REG
&& op2
!= subtarget
))
5980 if (code
== MINUS
&& GET_CODE (op2
) == CONST_INT
)
5983 op2
= negate_rtx (GET_MODE (value
), op2
);
5986 /* Check for an addition with OP2 a constant integer and our first
5987 operand a PLUS of a virtual register and something else. In that
5988 case, we want to emit the sum of the virtual register and the
5989 constant first and then add the other value. This allows virtual
5990 register instantiation to simply modify the constant rather than
5991 creating another one around this addition. */
5992 if (code
== PLUS
&& GET_CODE (op2
) == CONST_INT
5993 && GET_CODE (XEXP (value
, 0)) == PLUS
5994 && GET_CODE (XEXP (XEXP (value
, 0), 0)) == REG
5995 && REGNO (XEXP (XEXP (value
, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5996 && REGNO (XEXP (XEXP (value
, 0), 0)) <= LAST_VIRTUAL_REGISTER
)
5998 rtx temp
= expand_simple_binop (GET_MODE (value
), code
,
5999 XEXP (XEXP (value
, 0), 0), op2
,
6000 subtarget
, 0, OPTAB_LIB_WIDEN
);
6001 return expand_simple_binop (GET_MODE (value
), code
, temp
,
6002 force_operand (XEXP (XEXP (value
,
6004 target
, 0, OPTAB_LIB_WIDEN
);
6007 op1
= force_operand (XEXP (value
, 0), subtarget
);
6008 op2
= force_operand (op2
, NULL_RTX
);
6012 return expand_mult (GET_MODE (value
), op1
, op2
, target
, 1);
6014 if (!INTEGRAL_MODE_P (GET_MODE (value
)))
6015 return expand_simple_binop (GET_MODE (value
), code
, op1
, op2
,
6016 target
, 1, OPTAB_LIB_WIDEN
);
6018 return expand_divmod (0,
6019 FLOAT_MODE_P (GET_MODE (value
))
6020 ? RDIV_EXPR
: TRUNC_DIV_EXPR
,
6021 GET_MODE (value
), op1
, op2
, target
, 0);
6024 return expand_divmod (1, TRUNC_MOD_EXPR
, GET_MODE (value
), op1
, op2
,
6028 return expand_divmod (0, TRUNC_DIV_EXPR
, GET_MODE (value
), op1
, op2
,
6032 return expand_divmod (1, TRUNC_MOD_EXPR
, GET_MODE (value
), op1
, op2
,
6036 return expand_simple_binop (GET_MODE (value
), code
, op1
, op2
,
6037 target
, 0, OPTAB_LIB_WIDEN
);
6040 return expand_simple_binop (GET_MODE (value
), code
, op1
, op2
,
6041 target
, 1, OPTAB_LIB_WIDEN
);
6044 if (GET_RTX_CLASS (code
) == '1')
6046 op1
= force_operand (XEXP (value
, 0), NULL_RTX
);
6047 return expand_simple_unop (GET_MODE (value
), code
, op1
, target
, 0);
6050 #ifdef INSN_SCHEDULING
6051 /* On machines that have insn scheduling, we want all memory reference to be
6052 explicit, so we need to deal with such paradoxical SUBREGs. */
6053 if (GET_CODE (value
) == SUBREG
&& GET_CODE (SUBREG_REG (value
)) == MEM
6054 && (GET_MODE_SIZE (GET_MODE (value
))
6055 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value
)))))
6057 = simplify_gen_subreg (GET_MODE (value
),
6058 force_reg (GET_MODE (SUBREG_REG (value
)),
6059 force_operand (SUBREG_REG (value
),
6061 GET_MODE (SUBREG_REG (value
)),
6062 SUBREG_BYTE (value
));
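
/* Illustrative sketch only (not part of GCC): force_operand flattens an
   address-like expression such as (plus (mult R1 4) R2) into a pseudo
   holding the computed value, emitting the arithmetic as insns.  The helper
   name is hypothetical.  */
#if 0
static rtx
force_operand_usage_sketch (rtx addr_value)
{
  /* Passing NULL_RTX as the target lets force_operand choose one.  */
  return force_operand (addr_value, NULL_RTX);
}
#endif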
/* Subroutine of expand_expr: return nonzero iff there is no way that
   EXP can reference X, which is being modified.  TOP_P is nonzero if this
   call is going to be used to determine whether we need a temporary
   for EXP, as opposed to a recursive call to this function.

   It is always safe for this routine to return zero since it merely
   searches for optimization opportunities.  */
6077 safe_from_p (rtx x
, tree exp
, int top_p
)
6081 static tree save_expr_list
;
6084 /* If EXP has varying size, we MUST use a target since we currently
6085 have no way of allocating temporaries of variable size
6086 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
6087 So we assume here that something at a higher level has prevented a
6088 clash. This is somewhat bogus, but the best we can do. Only
6089 do this when X is BLKmode and when we are at the top level. */
6090 || (top_p
&& TREE_TYPE (exp
) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp
))
6091 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
))) != INTEGER_CST
6092 && (TREE_CODE (TREE_TYPE (exp
)) != ARRAY_TYPE
6093 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp
)) == NULL_TREE
6094 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp
)))
6096 && GET_MODE (x
) == BLKmode
)
6097 /* If X is in the outgoing argument area, it is always safe. */
6098 || (GET_CODE (x
) == MEM
6099 && (XEXP (x
, 0) == virtual_outgoing_args_rtx
6100 || (GET_CODE (XEXP (x
, 0)) == PLUS
6101 && XEXP (XEXP (x
, 0), 0) == virtual_outgoing_args_rtx
))))
6104 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
6105 find the underlying pseudo. */
6106 if (GET_CODE (x
) == SUBREG
)
6109 if (GET_CODE (x
) == REG
&& REGNO (x
) < FIRST_PSEUDO_REGISTER
)
/* A SAVE_EXPR might appear many times in the expression passed to the
   top-level safe_from_p call, and if it has a complex subexpression,
   examining it multiple times could result in a combinatorial explosion.
   E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
   with optimization took about 28 minutes to compile -- even though it was
   only a few lines long.  So we mark each SAVE_EXPR we see with TREE_PRIVATE
   and turn that off when we are done.  We keep a list of the SAVE_EXPRs
   we have processed.  Note that the only test of top_p was above.  */
6129 rtn
= safe_from_p (x
, exp
, 0);
6131 for (t
= save_expr_list
; t
!= 0; t
= TREE_CHAIN (t
))
6132 TREE_PRIVATE (TREE_PURPOSE (t
)) = 0;
6137 /* Now look at our tree code and possibly recurse. */
6138 switch (TREE_CODE_CLASS (TREE_CODE (exp
)))
6141 exp_rtl
= DECL_RTL_IF_SET (exp
);
6148 if (TREE_CODE (exp
) == TREE_LIST
)
6152 if (TREE_VALUE (exp
) && !safe_from_p (x
, TREE_VALUE (exp
), 0))
6154 exp
= TREE_CHAIN (exp
);
6157 if (TREE_CODE (exp
) != TREE_LIST
)
6158 return safe_from_p (x
, exp
, 0);
6161 else if (TREE_CODE (exp
) == ERROR_MARK
)
6162 return 1; /* An already-visited SAVE_EXPR? */
6168 if (!safe_from_p (x
, TREE_OPERAND (exp
, 1), 0))
6173 return safe_from_p (x
, TREE_OPERAND (exp
, 0), 0);
6177 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6178 the expression. If it is set, we conflict iff we are that rtx or
6179 both are in memory. Otherwise, we check all operands of the
6180 expression recursively. */
6182 switch (TREE_CODE (exp
))
6185 /* If the operand is static or we are static, we can't conflict.
6186 Likewise if we don't conflict with the operand at all. */
6187 if (staticp (TREE_OPERAND (exp
, 0))
6188 || TREE_STATIC (exp
)
6189 || safe_from_p (x
, TREE_OPERAND (exp
, 0), 0))
/* Otherwise, the only way this can conflict is if we are taking the
   address of a DECL and that address is part of X, which is very rare.  */
6195 exp
= TREE_OPERAND (exp
, 0);
6198 if (!DECL_RTL_SET_P (exp
)
6199 || GET_CODE (DECL_RTL (exp
)) != MEM
)
6202 exp_rtl
= XEXP (DECL_RTL (exp
), 0);
6207 if (GET_CODE (x
) == MEM
6208 && alias_sets_conflict_p (MEM_ALIAS_SET (x
),
6209 get_alias_set (exp
)))
/* Assume that the call will clobber all hard registers and
   all of memory.  */
6216 if ((GET_CODE (x
) == REG
&& REGNO (x
) < FIRST_PSEUDO_REGISTER
)
6217 || GET_CODE (x
) == MEM
)
/* If a sequence exists, we would have to scan every instruction
   in the sequence to see if it was safe.  This is probably not
   worthwhile.  */
6225 if (RTL_EXPR_SEQUENCE (exp
))
6228 exp_rtl
= RTL_EXPR_RTL (exp
);
6231 case WITH_CLEANUP_EXPR
:
6232 exp_rtl
= WITH_CLEANUP_EXPR_RTL (exp
);
6235 case CLEANUP_POINT_EXPR
:
6236 return safe_from_p (x
, TREE_OPERAND (exp
, 0), 0);
6239 exp_rtl
= SAVE_EXPR_RTL (exp
);
/* If we've already scanned this, don't do it again.  Otherwise,
   show we've scanned it and record for clearing the flag if we're
   going to.  */
6246 if (TREE_PRIVATE (exp
))
6249 TREE_PRIVATE (exp
) = 1;
6250 if (! safe_from_p (x
, TREE_OPERAND (exp
, 0), 0))
6252 TREE_PRIVATE (exp
) = 0;
6256 save_expr_list
= tree_cons (exp
, NULL_TREE
, save_expr_list
);
6260 /* The only operand we look at is operand 1. The rest aren't
6261 part of the expression. */
6262 return safe_from_p (x
, TREE_OPERAND (exp
, 1), 0);
6268 /* If we have an rtx, we do not need to scan our operands. */
6272 nops
= first_rtl_op (TREE_CODE (exp
));
6273 for (i
= 0; i
< nops
; i
++)
6274 if (TREE_OPERAND (exp
, i
) != 0
6275 && ! safe_from_p (x
, TREE_OPERAND (exp
, i
), 0))
6278 /* If this is a language-specific tree code, it may require
6279 special handling. */
6280 if ((unsigned int) TREE_CODE (exp
)
6281 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
6282 && !(*lang_hooks
.safe_from_p
) (x
, exp
))
/* If we have an rtl, find any enclosed object.  Then see if we conflict
   with it.  */
6290 if (GET_CODE (exp_rtl
) == SUBREG
)
6292 exp_rtl
= SUBREG_REG (exp_rtl
);
6293 if (GET_CODE (exp_rtl
) == REG
6294 && REGNO (exp_rtl
) < FIRST_PSEUDO_REGISTER
)
6298 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6299 are memory and they conflict. */
6300 return ! (rtx_equal_p (x
, exp_rtl
)
6301 || (GET_CODE (x
) == MEM
&& GET_CODE (exp_rtl
) == MEM
6302 && true_dependence (exp_rtl
, VOIDmode
, x
,
6303 rtx_addr_varies_p
)));
6306 /* If we reach here, it is safe. */
6310 /* Subroutine of expand_expr: return rtx if EXP is a
6311 variable or parameter; else return 0. */
6317 switch (TREE_CODE (exp
))
6321 return DECL_RTL (exp
);
#ifdef MAX_INTEGER_COMPUTATION_MODE

void
check_max_integer_computation_mode (tree exp)
{
  enum tree_code code;
  enum machine_mode mode;

  /* Strip any NOPs that don't change the mode.  */
  STRIP_NOPS (exp);
  code = TREE_CODE (exp);

  /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE.  */
  if (code == NOP_EXPR
      && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
    return;

  /* First check the type of the overall operation.  We need only look at
     unary, binary and relational operations.  */
  if (TREE_CODE_CLASS (code) == '1'
      || TREE_CODE_CLASS (code) == '2'
      || TREE_CODE_CLASS (code) == '<')
    {
      mode = TYPE_MODE (TREE_TYPE (exp));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	internal_error ("unsupported wide integer operation");
    }

  /* Check operand of a unary op.  */
  if (TREE_CODE_CLASS (code) == '1')
    {
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	internal_error ("unsupported wide integer operation");
    }

  /* Check operands of a binary/comparison op.  */
  if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
    {
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	internal_error ("unsupported wide integer operation");

      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	internal_error ("unsupported wide integer operation");
    }
}
#endif
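/* Illustrative note, assuming a hypothetical target (not taken from this
   file): if MAX_INTEGER_COMPUTATION_MODE were SImode, a tree whose type or
   operands have DImode would trip the internal_error calls above, since
   wider integer modes compare greater in the machine mode enumeration.  */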
/* Return the highest power of two that EXP is known to be a multiple of.
   This is used in updating alignment of MEMs in array references.  */

static unsigned HOST_WIDE_INT
highest_pow2_factor (tree exp)
{
  unsigned HOST_WIDE_INT c0, c1;

  switch (TREE_CODE (exp))
    {
    case INTEGER_CST:
      /* We can find the lowest bit that's a one.  If the low
	 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
	 We need to handle this case since we can find it in a COND_EXPR,
	 a MIN_EXPR, or a MAX_EXPR.  If the constant overflows, we have an
	 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
	 later ICE.  */
      if (TREE_CONSTANT_OVERFLOW (exp))
	return BIGGEST_ALIGNMENT;
      else
	{
	  /* Note: tree_low_cst is intentionally not used here,
	     we don't care about the upper bits.  */
	  c0 = TREE_INT_CST_LOW (exp);
	  c0 &= -c0;
	  return c0 ? c0 : BIGGEST_ALIGNMENT;
	}
      break;

    case PLUS_EXPR:  case MINUS_EXPR:  case MIN_EXPR:  case MAX_EXPR:
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      return MIN (c0, c1);

    case MULT_EXPR:
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      return c0 * c1;

    case ROUND_DIV_EXPR:  case TRUNC_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
      if (integer_pow2p (TREE_OPERAND (exp, 1))
	  && host_integerp (TREE_OPERAND (exp, 1), 1))
	{
	  c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
	  c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
	  return MAX (1, c0 / c1);
	}
      break;

    case NON_LVALUE_EXPR:  case NOP_EXPR:  case CONVERT_EXPR:
    case SAVE_EXPR: case WITH_RECORD_EXPR:
      return highest_pow2_factor (TREE_OPERAND (exp, 0));

    case COMPOUND_EXPR:
      return highest_pow2_factor (TREE_OPERAND (exp, 1));

    case COND_EXPR:
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
      return MIN (c0, c1);

    default:
      break;
    }

  return 1;
}
/* Similar, except that it is known that the expression must be a multiple
   of the alignment of TYPE.  */

static unsigned HOST_WIDE_INT
highest_pow2_factor_for_type (tree type, tree exp)
{
  unsigned HOST_WIDE_INT type_align, factor;

  factor = highest_pow2_factor (exp);
  type_align = TYPE_ALIGN (type) / BITS_PER_UNIT;
  return MAX (factor, type_align);
}
/* Return an object on the placeholder list that matches EXP, a
   PLACEHOLDER_EXPR.  An object "matches" if it is of the type of the
   PLACEHOLDER_EXPR or a pointer type to it.  For further information, see
   tree.def.  If no such object is found, return 0.  If PLIST is nonzero, it
   is a location which initially points to a starting location in the
   placeholder list (zero means start of the list) and where a pointer into
   the placeholder list at which the object is found is placed.  */

tree
find_placeholder (tree exp, tree *plist)
{
  tree type = TREE_TYPE (exp);
  tree placeholder_expr;

  for (placeholder_expr
       = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
       placeholder_expr != 0;
       placeholder_expr = TREE_CHAIN (placeholder_expr))
    {
      tree need_type = TYPE_MAIN_VARIANT (type);
      tree elt;

      /* Find the outermost reference that is of the type we want.  If none,
	 see if any object has a type that is a pointer to the type we
	 want.  */
      for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
	   elt = ((TREE_CODE (elt) == COMPOUND_EXPR
		   || TREE_CODE (elt) == COND_EXPR)
		  ? TREE_OPERAND (elt, 1)
		  : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
		     || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
		     || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
		     || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
		  ? TREE_OPERAND (elt, 0) : 0))
	if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
	  {
	    if (plist)
	      *plist = placeholder_expr;
	    return elt;
	  }

      for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
	   elt
	   = ((TREE_CODE (elt) == COMPOUND_EXPR
	       || TREE_CODE (elt) == COND_EXPR)
	      ? TREE_OPERAND (elt, 1)
	      : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
		 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
		 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
		 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
	      ? TREE_OPERAND (elt, 0) : 0))
	if (POINTER_TYPE_P (TREE_TYPE (elt))
	    && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
		== need_type))
	  {
	    if (plist)
	      *plist = placeholder_expr;
	    return build1 (INDIRECT_REF, need_type, elt);
	  }
    }

  return 0;
}
/* expand_expr: generate code for computing expression EXP.
   An rtx for the computed value is returned.  The value is never null.
   In the case of a void EXP, const0_rtx is returned.

   The value may be stored in TARGET if TARGET is nonzero.
   TARGET is just a suggestion; callers must assume that
   the rtx returned may not be the same as TARGET.

   If TARGET is CONST0_RTX, it means that the value will be ignored.

   If TMODE is not VOIDmode, it suggests generating the
   result in mode TMODE.  But this is done only when convenient.
   Otherwise, TMODE is ignored and the value generated in its natural mode.
   TMODE is just a suggestion; callers must assume that
   the rtx returned may not have mode TMODE.

   Note that TARGET may have neither TMODE nor MODE.  In that case, it
   probably will not be used.

   If MODIFIER is EXPAND_SUM then when EXP is an addition
   we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
   or a nest of (PLUS ...) and (MINUS ...) where the terms are
   products as above, or REG or MEM, or constant.
   Ordinarily in such cases we would output mul or add instructions
   and then return a pseudo reg containing the sum.

   EXPAND_INITIALIZER is much like EXPAND_SUM except that
   it also marks a label as absolutely required (it can't be dead).
   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
   This is used for outputting expressions used in initializers.

   EXPAND_CONST_ADDRESS says that it is okay to return a MEM
   with a constant address even if that address is not normally legitimate.
   EXPAND_INITIALIZER and EXPAND_SUM also have this effect.

   EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
   a call parameter.  Such targets require special care as we haven't yet
   marked TARGET so that it's safe from being trashed by libcalls.  We
   don't want to use TARGET for anything but the final result;
   intermediate values must go elsewhere.  Additionally, calls to
   emit_block_move will be flagged with BLOCK_OP_CALL_PARM.  */
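/* A minimal sketch of a typical call, assuming EXP is a tree the caller has
   already built and that no particular target or mode is requested (this
   mirrors the recursive calls made throughout this file):

       rtx val = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);

   The returned rtx may be a register, memory, or a constant; a caller that
   needs the value in a specific place must copy it there itself.  */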
6570 expand_expr (tree exp
, rtx target
, enum machine_mode tmode
, enum expand_modifier modifier
)
6573 tree type
= TREE_TYPE (exp
);
6574 int unsignedp
= TREE_UNSIGNED (type
);
6575 enum machine_mode mode
;
6576 enum tree_code code
= TREE_CODE (exp
);
6578 rtx subtarget
, original_target
;
6582 /* Handle ERROR_MARK before anybody tries to access its type. */
6583 if (TREE_CODE (exp
) == ERROR_MARK
|| TREE_CODE (type
) == ERROR_MARK
)
6585 op0
= CONST0_RTX (tmode
);
6591 mode
= TYPE_MODE (type
);
6592 /* Use subtarget as the target for operand 0 of a binary operation. */
6593 subtarget
= get_subtarget (target
);
6594 original_target
= target
;
6595 ignore
= (target
== const0_rtx
6596 || ((code
== NON_LVALUE_EXPR
|| code
== NOP_EXPR
6597 || code
== CONVERT_EXPR
|| code
== REFERENCE_EXPR
6598 || code
== COND_EXPR
|| code
== VIEW_CONVERT_EXPR
)
6599 && TREE_CODE (type
) == VOID_TYPE
));
6601 /* If we are going to ignore this result, we need only do something
6602 if there is a side-effect somewhere in the expression. If there
6603 is, short-circuit the most common cases here. Note that we must
6604 not call expand_expr with anything but const0_rtx in case this
6605 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6609 if (! TREE_SIDE_EFFECTS (exp
))
6612 /* Ensure we reference a volatile object even if value is ignored, but
6613 don't do this if all we are doing is taking its address. */
6614 if (TREE_THIS_VOLATILE (exp
)
6615 && TREE_CODE (exp
) != FUNCTION_DECL
6616 && mode
!= VOIDmode
&& mode
!= BLKmode
6617 && modifier
!= EXPAND_CONST_ADDRESS
)
6619 temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, modifier
);
6620 if (GET_CODE (temp
) == MEM
)
6621 temp
= copy_to_reg (temp
);
6625 if (TREE_CODE_CLASS (code
) == '1' || code
== COMPONENT_REF
6626 || code
== INDIRECT_REF
|| code
== BUFFER_REF
)
6627 return expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
,
6630 else if (TREE_CODE_CLASS (code
) == '2' || TREE_CODE_CLASS (code
) == '<'
6631 || code
== ARRAY_REF
|| code
== ARRAY_RANGE_REF
)
6633 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, modifier
);
6634 expand_expr (TREE_OPERAND (exp
, 1), const0_rtx
, VOIDmode
, modifier
);
6637 else if ((code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
)
6638 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 1)))
/* If the second operand has no side effects, just evaluate
   the first.  */
6641 return expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
,
6643 else if (code
== BIT_FIELD_REF
)
6645 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, modifier
);
6646 expand_expr (TREE_OPERAND (exp
, 1), const0_rtx
, VOIDmode
, modifier
);
6647 expand_expr (TREE_OPERAND (exp
, 2), const0_rtx
, VOIDmode
, modifier
);
6654 #ifdef MAX_INTEGER_COMPUTATION_MODE
6655 /* Only check stuff here if the mode we want is different from the mode
6656 of the expression; if it's the same, check_max_integer_computation_mode
6657 will handle it. Do we really need to check this stuff at all? */
6660 && GET_MODE (target
) != mode
6661 && TREE_CODE (exp
) != INTEGER_CST
6662 && TREE_CODE (exp
) != PARM_DECL
6663 && TREE_CODE (exp
) != ARRAY_REF
6664 && TREE_CODE (exp
) != ARRAY_RANGE_REF
6665 && TREE_CODE (exp
) != COMPONENT_REF
6666 && TREE_CODE (exp
) != BIT_FIELD_REF
6667 && TREE_CODE (exp
) != INDIRECT_REF
6668 && TREE_CODE (exp
) != CALL_EXPR
6669 && TREE_CODE (exp
) != VAR_DECL
6670 && TREE_CODE (exp
) != RTL_EXPR
)
6672 enum machine_mode mode
= GET_MODE (target
);
6674 if (GET_MODE_CLASS (mode
) == MODE_INT
6675 && mode
> MAX_INTEGER_COMPUTATION_MODE
)
6676 internal_error ("unsupported wide integer operation");
6680 && TREE_CODE (exp
) != INTEGER_CST
6681 && TREE_CODE (exp
) != PARM_DECL
6682 && TREE_CODE (exp
) != ARRAY_REF
6683 && TREE_CODE (exp
) != ARRAY_RANGE_REF
6684 && TREE_CODE (exp
) != COMPONENT_REF
6685 && TREE_CODE (exp
) != BIT_FIELD_REF
6686 && TREE_CODE (exp
) != INDIRECT_REF
6687 && TREE_CODE (exp
) != VAR_DECL
6688 && TREE_CODE (exp
) != CALL_EXPR
6689 && TREE_CODE (exp
) != RTL_EXPR
6690 && GET_MODE_CLASS (tmode
) == MODE_INT
6691 && tmode
> MAX_INTEGER_COMPUTATION_MODE
)
6692 internal_error ("unsupported wide integer operation");
6694 check_max_integer_computation_mode (exp
);
/* If we will do cse, generate all results into pseudo registers
   since 1) that allows cse to find more things
   and 2) otherwise cse could produce an insn the machine
   cannot support.  An exception is a CONSTRUCTOR into a multi-word
   MEM: that's much more likely to be most efficient into the MEM.
   Another is a CALL_EXPR which must return in memory.  */
6704 if (! cse_not_expected
&& mode
!= BLKmode
&& target
6705 && (GET_CODE (target
) != REG
|| REGNO (target
) < FIRST_PSEUDO_REGISTER
)
6706 && ! (code
== CONSTRUCTOR
&& GET_MODE_SIZE (mode
) > UNITS_PER_WORD
)
6707 && ! (code
== CALL_EXPR
&& aggregate_value_p (exp
, exp
)))
6714 tree function
= decl_function_context (exp
);
/* Labels in containing functions, or labels used from initializers,
   must be forced.  */
6717 if (modifier
== EXPAND_INITIALIZER
6718 || (function
!= current_function_decl
6719 && function
!= inline_function_decl
6721 temp
= force_label_rtx (exp
);
6723 temp
= label_rtx (exp
);
6725 temp
= gen_rtx_MEM (FUNCTION_MODE
, gen_rtx_LABEL_REF (Pmode
, temp
));
6726 if (function
!= current_function_decl
6727 && function
!= inline_function_decl
&& function
!= 0)
6728 LABEL_REF_NONLOCAL_P (XEXP (temp
, 0)) = 1;
6733 if (!DECL_RTL_SET_P (exp
))
6735 error ("%Hprior parameter's size depends on '%D'",
6736 &DECL_SOURCE_LOCATION (exp
), exp
);
6737 return CONST0_RTX (mode
);
6740 /* ... fall through ... */
6743 /* If a static var's type was incomplete when the decl was written,
6744 but the type is complete now, lay out the decl now. */
6745 if (DECL_SIZE (exp
) == 0
6746 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp
))
6747 && (TREE_STATIC (exp
) || DECL_EXTERNAL (exp
)))
6748 layout_decl (exp
, 0);
6750 /* ... fall through ... */
6754 if (DECL_RTL (exp
) == 0)
/* Ensure variable marked as used even if it doesn't go through
   a parser.  If it hasn't been used yet, write out an external
   definition.  */
6760 if (! TREE_USED (exp
))
6762 assemble_external (exp
);
6763 TREE_USED (exp
) = 1;
6766 /* Show we haven't gotten RTL for this yet. */
6769 /* Handle variables inherited from containing functions. */
6770 context
= decl_function_context (exp
);
6772 /* We treat inline_function_decl as an alias for the current function
6773 because that is the inline function whose vars, types, etc.
6774 are being merged into the current function.
6775 See expand_inline_function. */
6777 if (context
!= 0 && context
!= current_function_decl
6778 && context
!= inline_function_decl
6779 /* If var is static, we don't need a static chain to access it. */
6780 && ! (GET_CODE (DECL_RTL (exp
)) == MEM
6781 && CONSTANT_P (XEXP (DECL_RTL (exp
), 0))))
6785 /* Mark as non-local and addressable. */
6786 DECL_NONLOCAL (exp
) = 1;
6787 if (DECL_NO_STATIC_CHAIN (current_function_decl
))
6789 (*lang_hooks
.mark_addressable
) (exp
);
6790 if (GET_CODE (DECL_RTL (exp
)) != MEM
)
6792 addr
= XEXP (DECL_RTL (exp
), 0);
6793 if (GET_CODE (addr
) == MEM
)
6795 = replace_equiv_address (addr
,
6796 fix_lexical_addr (XEXP (addr
, 0), exp
));
6798 addr
= fix_lexical_addr (addr
, exp
);
6800 temp
= replace_equiv_address (DECL_RTL (exp
), addr
);
/* This is the case of an array whose size is to be determined
   from its initializer, while the initializer is still being parsed.
   See expand_decl.  */
6807 else if (GET_CODE (DECL_RTL (exp
)) == MEM
6808 && GET_CODE (XEXP (DECL_RTL (exp
), 0)) == REG
)
6809 temp
= validize_mem (DECL_RTL (exp
));
6811 /* If DECL_RTL is memory, we are in the normal case and either
6812 the address is not valid or it is not a register and -fforce-addr
6813 is specified, get the address into a register. */
6815 else if (GET_CODE (DECL_RTL (exp
)) == MEM
6816 && modifier
!= EXPAND_CONST_ADDRESS
6817 && modifier
!= EXPAND_SUM
6818 && modifier
!= EXPAND_INITIALIZER
6819 && (! memory_address_p (DECL_MODE (exp
),
6820 XEXP (DECL_RTL (exp
), 0))
6822 && GET_CODE (XEXP (DECL_RTL (exp
), 0)) != REG
)))
6823 temp
= replace_equiv_address (DECL_RTL (exp
),
6824 copy_rtx (XEXP (DECL_RTL (exp
), 0)));
6826 /* If we got something, return it. But first, set the alignment
6827 if the address is a register. */
6830 if (GET_CODE (temp
) == MEM
&& GET_CODE (XEXP (temp
, 0)) == REG
)
6831 mark_reg_pointer (XEXP (temp
, 0), DECL_ALIGN (exp
));
6836 /* If the mode of DECL_RTL does not match that of the decl, it
6837 must be a promoted value. We return a SUBREG of the wanted mode,
6838 but mark it so that we know that it was already extended. */
6840 if (GET_CODE (DECL_RTL (exp
)) == REG
6841 && GET_MODE (DECL_RTL (exp
)) != DECL_MODE (exp
))
6843 /* Get the signedness used for this variable. Ensure we get the
6844 same mode we got when the variable was declared. */
6845 if (GET_MODE (DECL_RTL (exp
))
6846 != promote_mode (type
, DECL_MODE (exp
), &unsignedp
,
6847 (TREE_CODE (exp
) == RESULT_DECL
? 1 : 0)))
6850 temp
= gen_lowpart_SUBREG (mode
, DECL_RTL (exp
));
6851 SUBREG_PROMOTED_VAR_P (temp
) = 1;
6852 SUBREG_PROMOTED_UNSIGNED_SET (temp
, unsignedp
);
6856 return DECL_RTL (exp
);
6859 temp
= immed_double_const (TREE_INT_CST_LOW (exp
),
6860 TREE_INT_CST_HIGH (exp
), mode
);
6862 /* ??? If overflow is set, fold will have done an incomplete job,
6863 which can result in (plus xx (const_int 0)), which can get
6864 simplified by validate_replace_rtx during virtual register
6865 instantiation, which can result in unrecognizable insns.
6866 Avoid this by forcing all overflows into registers. */
6867 if (TREE_CONSTANT_OVERFLOW (exp
)
6868 && modifier
!= EXPAND_INITIALIZER
)
6869 temp
= force_reg (mode
, temp
);
6874 return const_vector_from_tree (exp
);
6877 return expand_expr (DECL_INITIAL (exp
), target
, VOIDmode
, modifier
);
6880 /* If optimized, generate immediate CONST_DOUBLE
6881 which will be turned into memory by reload if necessary.
6883 We used to force a register so that loop.c could see it. But
6884 this does not allow gen_* patterns to perform optimizations with
6885 the constants. It also produces two insns in cases like "x = 1.0;".
6886 On most machines, floating-point constants are not permitted in
6887 many insns, so we'd end up copying it to a register in any case.
6889 Now, we do the copying in expand_binop, if appropriate. */
6890 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp
),
6891 TYPE_MODE (TREE_TYPE (exp
)));
6894 /* Handle evaluating a complex constant in a CONCAT target. */
6895 if (original_target
&& GET_CODE (original_target
) == CONCAT
)
6897 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (TREE_TYPE (exp
)));
6900 rtarg
= XEXP (original_target
, 0);
6901 itarg
= XEXP (original_target
, 1);
6903 /* Move the real and imaginary parts separately. */
6904 op0
= expand_expr (TREE_REALPART (exp
), rtarg
, mode
, 0);
6905 op1
= expand_expr (TREE_IMAGPART (exp
), itarg
, mode
, 0);
6908 emit_move_insn (rtarg
, op0
);
6910 emit_move_insn (itarg
, op1
);
6912 return original_target
;
6915 /* ... fall through ... */
6918 temp
= output_constant_def (exp
, 1);
6920 /* temp contains a constant address.
6921 On RISC machines where a constant address isn't valid,
6922 make some insns to get that address into a register. */
6923 if (modifier
!= EXPAND_CONST_ADDRESS
6924 && modifier
!= EXPAND_INITIALIZER
6925 && modifier
!= EXPAND_SUM
6926 && (! memory_address_p (mode
, XEXP (temp
, 0))
6927 || flag_force_addr
))
6928 return replace_equiv_address (temp
,
6929 copy_rtx (XEXP (temp
, 0)));
6932 case EXPR_WITH_FILE_LOCATION
:
6935 location_t saved_loc
= input_location
;
6936 input_filename
= EXPR_WFL_FILENAME (exp
);
6937 input_line
= EXPR_WFL_LINENO (exp
);
6938 if (EXPR_WFL_EMIT_LINE_NOTE (exp
))
6939 emit_line_note (input_location
);
6940 /* Possibly avoid switching back and forth here. */
6941 to_return
= expand_expr (EXPR_WFL_NODE (exp
), target
, tmode
, modifier
);
6942 input_location
= saved_loc
;
6947 context
= decl_function_context (exp
);
6949 /* If this SAVE_EXPR was at global context, assume we are an
6950 initialization function and move it into our context. */
6952 SAVE_EXPR_CONTEXT (exp
) = current_function_decl
;
6954 /* We treat inline_function_decl as an alias for the current function
6955 because that is the inline function whose vars, types, etc.
6956 are being merged into the current function.
6957 See expand_inline_function. */
6958 if (context
== current_function_decl
|| context
== inline_function_decl
)
6961 /* If this is non-local, handle it. */
6964 /* The following call just exists to abort if the context is
6965 not of a containing function. */
6966 find_function_data (context
);
6968 temp
= SAVE_EXPR_RTL (exp
);
6969 if (temp
&& GET_CODE (temp
) == REG
)
6971 put_var_into_stack (exp
, /*rescan=*/true);
6972 temp
= SAVE_EXPR_RTL (exp
);
6974 if (temp
== 0 || GET_CODE (temp
) != MEM
)
6977 replace_equiv_address (temp
,
6978 fix_lexical_addr (XEXP (temp
, 0), exp
));
6980 if (SAVE_EXPR_RTL (exp
) == 0)
6982 if (mode
== VOIDmode
)
6985 temp
= assign_temp (build_qualified_type (type
,
6987 | TYPE_QUAL_CONST
)),
6990 SAVE_EXPR_RTL (exp
) = temp
;
6991 if (!optimize
&& GET_CODE (temp
) == REG
)
6992 save_expr_regs
= gen_rtx_EXPR_LIST (VOIDmode
, temp
,
/* If the mode of TEMP does not match that of the expression, it
   must be a promoted value.  We pass store_expr a SUBREG of the
   wanted mode but mark it so that we know that it was already
   extended.  */
7000 if (GET_CODE (temp
) == REG
&& GET_MODE (temp
) != mode
)
7002 temp
= gen_lowpart_SUBREG (mode
, SAVE_EXPR_RTL (exp
));
7003 promote_mode (type
, mode
, &unsignedp
, 0);
7004 SUBREG_PROMOTED_VAR_P (temp
) = 1;
7005 SUBREG_PROMOTED_UNSIGNED_SET (temp
, unsignedp
);
7008 if (temp
== const0_rtx
)
7009 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, 0);
7011 store_expr (TREE_OPERAND (exp
, 0), temp
,
7012 modifier
== EXPAND_STACK_PARM
? 2 : 0);
7014 TREE_USED (exp
) = 1;
7017 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
7018 must be a promoted value. We return a SUBREG of the wanted mode,
7019 but mark it so that we know that it was already extended. */
7021 if (GET_CODE (SAVE_EXPR_RTL (exp
)) == REG
7022 && GET_MODE (SAVE_EXPR_RTL (exp
)) != mode
)
7024 /* Compute the signedness and make the proper SUBREG. */
7025 promote_mode (type
, mode
, &unsignedp
, 0);
7026 temp
= gen_lowpart_SUBREG (mode
, SAVE_EXPR_RTL (exp
));
7027 SUBREG_PROMOTED_VAR_P (temp
) = 1;
7028 SUBREG_PROMOTED_UNSIGNED_SET (temp
, unsignedp
);
7032 return SAVE_EXPR_RTL (exp
);
7037 temp
= expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
, modifier
);
7038 TREE_OPERAND (exp
, 0)
7039 = (*lang_hooks
.unsave_expr_now
) (TREE_OPERAND (exp
, 0));
7043 case PLACEHOLDER_EXPR
:
7045 tree old_list
= placeholder_list
;
7046 tree placeholder_expr
= 0;
7048 exp
= find_placeholder (exp
, &placeholder_expr
);
7052 placeholder_list
= TREE_CHAIN (placeholder_expr
);
7053 temp
= expand_expr (exp
, original_target
, tmode
, modifier
);
7054 placeholder_list
= old_list
;
7058 case WITH_RECORD_EXPR
:
7059 /* Put the object on the placeholder list, expand our first operand,
7060 and pop the list. */
7061 placeholder_list
= tree_cons (TREE_OPERAND (exp
, 1), NULL_TREE
,
7063 target
= expand_expr (TREE_OPERAND (exp
, 0), original_target
, tmode
,
7065 placeholder_list
= TREE_CHAIN (placeholder_list
);
7069 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == LABEL_DECL
)
7070 expand_goto (TREE_OPERAND (exp
, 0));
7072 expand_computed_goto (TREE_OPERAND (exp
, 0));
7076 expand_exit_loop_if_false (NULL
,
7077 invert_truthvalue (TREE_OPERAND (exp
, 0)));
7080 case LABELED_BLOCK_EXPR
:
7081 if (LABELED_BLOCK_BODY (exp
))
7082 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp
), 0, 1);
7083 /* Should perhaps use expand_label, but this is simpler and safer. */
7084 do_pending_stack_adjust ();
7085 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp
)));
7088 case EXIT_BLOCK_EXPR
:
7089 if (EXIT_BLOCK_RETURN (exp
))
7090 sorry ("returned value in block_exit_expr");
7091 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp
)));
7096 expand_start_loop (1);
7097 expand_expr_stmt_value (TREE_OPERAND (exp
, 0), 0, 1);
7105 tree vars
= TREE_OPERAND (exp
, 0);
7107 /* Need to open a binding contour here because
7108 if there are any cleanups they must be contained here. */
7109 expand_start_bindings (2);
7111 /* Mark the corresponding BLOCK for output in its proper place. */
7112 if (TREE_OPERAND (exp
, 2) != 0
7113 && ! TREE_USED (TREE_OPERAND (exp
, 2)))
7114 (*lang_hooks
.decls
.insert_block
) (TREE_OPERAND (exp
, 2));
7116 /* If VARS have not yet been expanded, expand them now. */
7119 if (!DECL_RTL_SET_P (vars
))
7121 expand_decl_init (vars
);
7122 vars
= TREE_CHAIN (vars
);
7125 temp
= expand_expr (TREE_OPERAND (exp
, 1), target
, tmode
, modifier
);
7127 expand_end_bindings (TREE_OPERAND (exp
, 0), 0, 0);
7133 if (RTL_EXPR_SEQUENCE (exp
))
7135 if (RTL_EXPR_SEQUENCE (exp
) == const0_rtx
)
7137 emit_insn (RTL_EXPR_SEQUENCE (exp
));
7138 RTL_EXPR_SEQUENCE (exp
) = const0_rtx
;
7140 preserve_rtl_expr_result (RTL_EXPR_RTL (exp
));
7141 free_temps_for_rtl_expr (exp
);
7142 return RTL_EXPR_RTL (exp
);
/* If we don't need the result, just ensure we evaluate any
   subexpressions.  */
7151 for (elt
= CONSTRUCTOR_ELTS (exp
); elt
; elt
= TREE_CHAIN (elt
))
7152 expand_expr (TREE_VALUE (elt
), const0_rtx
, VOIDmode
, 0);
7157 /* All elts simple constants => refer to a constant in memory. But
7158 if this is a non-BLKmode mode, let it store a field at a time
7159 since that should make a CONST_INT or CONST_DOUBLE when we
7160 fold. Likewise, if we have a target we can use, it is best to
7161 store directly into the target unless the type is large enough
7162 that memcpy will be used. If we are making an initializer and
7163 all operands are constant, put it in memory as well.
7165 FIXME: Avoid trying to fill vector constructors piece-meal.
7166 Output them with output_constant_def below unless we're sure
7167 they're zeros. This should go away when vector initializers
7168 are treated like VECTOR_CST instead of arrays.
7170 else if ((TREE_STATIC (exp
)
7171 && ((mode
== BLKmode
7172 && ! (target
!= 0 && safe_from_p (target
, exp
, 1)))
7173 || TREE_ADDRESSABLE (exp
)
7174 || (host_integerp (TYPE_SIZE_UNIT (type
), 1)
7175 && (! MOVE_BY_PIECES_P
7176 (tree_low_cst (TYPE_SIZE_UNIT (type
), 1),
7178 && ((TREE_CODE (type
) == VECTOR_TYPE
7179 && !is_zeros_p (exp
))
7180 || ! mostly_zeros_p (exp
)))))
7181 || ((modifier
== EXPAND_INITIALIZER
7182 || modifier
== EXPAND_CONST_ADDRESS
)
7183 && TREE_CONSTANT (exp
)))
7185 rtx constructor
= output_constant_def (exp
, 1);
7187 if (modifier
!= EXPAND_CONST_ADDRESS
7188 && modifier
!= EXPAND_INITIALIZER
7189 && modifier
!= EXPAND_SUM
)
7190 constructor
= validize_mem (constructor
);
7196 /* Handle calls that pass values in multiple non-contiguous
7197 locations. The Irix 6 ABI has examples of this. */
7198 if (target
== 0 || ! safe_from_p (target
, exp
, 1)
7199 || GET_CODE (target
) == PARALLEL
7200 || modifier
== EXPAND_STACK_PARM
)
7202 = assign_temp (build_qualified_type (type
,
7204 | (TREE_READONLY (exp
)
7205 * TYPE_QUAL_CONST
))),
7206 0, TREE_ADDRESSABLE (exp
), 1);
7208 store_constructor (exp
, target
, 0, int_expr_size (exp
));
7214 tree exp1
= TREE_OPERAND (exp
, 0);
7216 tree string
= string_constant (exp1
, &index
);
7218 /* Try to optimize reads from const strings. */
7220 && TREE_CODE (string
) == STRING_CST
7221 && TREE_CODE (index
) == INTEGER_CST
7222 && compare_tree_int (index
, TREE_STRING_LENGTH (string
)) < 0
7223 && GET_MODE_CLASS (mode
) == MODE_INT
7224 && GET_MODE_SIZE (mode
) == 1
7225 && modifier
!= EXPAND_WRITE
)
7226 return gen_int_mode (TREE_STRING_POINTER (string
)
7227 [TREE_INT_CST_LOW (index
)], mode
);
7229 op0
= expand_expr (exp1
, NULL_RTX
, VOIDmode
, EXPAND_SUM
);
7230 op0
= memory_address (mode
, op0
);
7231 temp
= gen_rtx_MEM (mode
, op0
);
7232 set_mem_attributes (temp
, exp
, 0);
7234 /* If we are writing to this object and its type is a record with
7235 readonly fields, we must mark it as readonly so it will
7236 conflict with readonly references to those fields. */
7237 if (modifier
== EXPAND_WRITE
&& readonly_fields_p (type
))
7238 RTX_UNCHANGING_P (temp
) = 1;
7244 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 0))) != ARRAY_TYPE
)
7248 tree array
= TREE_OPERAND (exp
, 0);
7249 tree domain
= TYPE_DOMAIN (TREE_TYPE (array
));
7250 tree low_bound
= domain
? TYPE_MIN_VALUE (domain
) : integer_zero_node
;
7251 tree index
= convert (sizetype
, TREE_OPERAND (exp
, 1));
7254 /* Optimize the special-case of a zero lower bound.
7256 We convert the low_bound to sizetype to avoid some problems
7257 with constant folding. (E.g. suppose the lower bound is 1,
7258 and its mode is QI. Without the conversion, (ARRAY
7259 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
7260 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
7262 if (! integer_zerop (low_bound
))
7263 index
= size_diffop (index
, convert (sizetype
, low_bound
));
7265 /* Fold an expression like: "foo"[2].
7266 This is not done in fold so it won't happen inside &.
7267 Don't fold if this is for wide characters since it's too
7268 difficult to do correctly and this is a very rare case. */
7270 if (modifier
!= EXPAND_CONST_ADDRESS
7271 && modifier
!= EXPAND_INITIALIZER
7272 && modifier
!= EXPAND_MEMORY
7273 && TREE_CODE (array
) == STRING_CST
7274 && TREE_CODE (index
) == INTEGER_CST
7275 && compare_tree_int (index
, TREE_STRING_LENGTH (array
)) < 0
7276 && GET_MODE_CLASS (mode
) == MODE_INT
7277 && GET_MODE_SIZE (mode
) == 1)
7278 return gen_int_mode (TREE_STRING_POINTER (array
)
7279 [TREE_INT_CST_LOW (index
)], mode
);
7281 /* If this is a constant index into a constant array,
7282 just get the value from the array. Handle both the cases when
7283 we have an explicit constructor and when our operand is a variable
7284 that was declared const. */
7286 if (modifier
!= EXPAND_CONST_ADDRESS
7287 && modifier
!= EXPAND_INITIALIZER
7288 && modifier
!= EXPAND_MEMORY
7289 && TREE_CODE (array
) == CONSTRUCTOR
7290 && ! TREE_SIDE_EFFECTS (array
)
7291 && TREE_CODE (index
) == INTEGER_CST
7292 && 0 > compare_tree_int (index
,
7293 list_length (CONSTRUCTOR_ELTS
7294 (TREE_OPERAND (exp
, 0)))))
7298 for (elem
= CONSTRUCTOR_ELTS (TREE_OPERAND (exp
, 0)),
7299 i
= TREE_INT_CST_LOW (index
);
7300 elem
!= 0 && i
!= 0; i
--, elem
= TREE_CHAIN (elem
))
7304 return expand_expr (fold (TREE_VALUE (elem
)), target
, tmode
,
7308 else if (optimize
>= 1
7309 && modifier
!= EXPAND_CONST_ADDRESS
7310 && modifier
!= EXPAND_INITIALIZER
7311 && modifier
!= EXPAND_MEMORY
7312 && TREE_READONLY (array
) && ! TREE_SIDE_EFFECTS (array
)
7313 && TREE_CODE (array
) == VAR_DECL
&& DECL_INITIAL (array
)
7314 && TREE_CODE (DECL_INITIAL (array
)) != ERROR_MARK
)
7316 if (TREE_CODE (index
) == INTEGER_CST
)
7318 tree init
= DECL_INITIAL (array
);
7320 if (TREE_CODE (init
) == CONSTRUCTOR
)
7324 for (elem
= CONSTRUCTOR_ELTS (init
);
7326 && !tree_int_cst_equal (TREE_PURPOSE (elem
), index
));
7327 elem
= TREE_CHAIN (elem
))
7330 if (elem
&& !TREE_SIDE_EFFECTS (TREE_VALUE (elem
)))
7331 return expand_expr (fold (TREE_VALUE (elem
)), target
,
7334 else if (TREE_CODE (init
) == STRING_CST
7335 && 0 > compare_tree_int (index
,
7336 TREE_STRING_LENGTH (init
)))
7338 tree type
= TREE_TYPE (TREE_TYPE (init
));
7339 enum machine_mode mode
= TYPE_MODE (type
);
7341 if (GET_MODE_CLASS (mode
) == MODE_INT
7342 && GET_MODE_SIZE (mode
) == 1)
7343 return gen_int_mode (TREE_STRING_POINTER (init
)
7344 [TREE_INT_CST_LOW (index
)], mode
);
7349 goto normal_inner_ref
;
7352 /* If the operand is a CONSTRUCTOR, we can just extract the
7353 appropriate field if it is present. */
7354 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == CONSTRUCTOR
)
7358 for (elt
= CONSTRUCTOR_ELTS (TREE_OPERAND (exp
, 0)); elt
;
7359 elt
= TREE_CHAIN (elt
))
7360 if (TREE_PURPOSE (elt
) == TREE_OPERAND (exp
, 1)
7361 /* We can normally use the value of the field in the
7362 CONSTRUCTOR. However, if this is a bitfield in
7363 an integral mode that we can fit in a HOST_WIDE_INT,
7364 we must mask only the number of bits in the bitfield,
7365 since this is done implicitly by the constructor. If
7366 the bitfield does not meet either of those conditions,
7367 we can't do this optimization. */
7368 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt
))
7369 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt
)))
7371 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt
)))
7372 <= HOST_BITS_PER_WIDE_INT
))))
7374 if (DECL_BIT_FIELD (TREE_PURPOSE (elt
))
7375 && modifier
== EXPAND_STACK_PARM
)
7377 op0
= expand_expr (TREE_VALUE (elt
), target
, tmode
, modifier
);
7378 if (DECL_BIT_FIELD (TREE_PURPOSE (elt
)))
7380 HOST_WIDE_INT bitsize
7381 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt
)));
7382 enum machine_mode imode
7383 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt
)));
7385 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt
))))
7387 op1
= GEN_INT (((HOST_WIDE_INT
) 1 << bitsize
) - 1);
7388 op0
= expand_and (imode
, op0
, op1
, target
);
7393 = build_int_2 (GET_MODE_BITSIZE (imode
) - bitsize
,
7396 op0
= expand_shift (LSHIFT_EXPR
, imode
, op0
, count
,
7398 op0
= expand_shift (RSHIFT_EXPR
, imode
, op0
, count
,
7406 goto normal_inner_ref
;
7409 case ARRAY_RANGE_REF
:
7412 enum machine_mode mode1
;
7413 HOST_WIDE_INT bitsize
, bitpos
;
7416 tree tem
= get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
,
7417 &mode1
, &unsignedp
, &volatilep
);
7420 /* If we got back the original object, something is wrong. Perhaps
7421 we are evaluating an expression too early. In any event, don't
7422 infinitely recurse. */
7426 /* If TEM's type is a union of variable size, pass TARGET to the inner
7427 computation, since it will need a temporary and TARGET is known
7428 to have to do. This occurs in unchecked conversion in Ada. */
7432 (TREE_CODE (TREE_TYPE (tem
)) == UNION_TYPE
7433 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem
)))
7435 && modifier
!= EXPAND_STACK_PARM
7436 ? target
: NULL_RTX
),
7438 (modifier
== EXPAND_INITIALIZER
7439 || modifier
== EXPAND_CONST_ADDRESS
7440 || modifier
== EXPAND_STACK_PARM
)
7441 ? modifier
: EXPAND_NORMAL
);
7443 /* If this is a constant, put it into a register if it is a
7444 legitimate constant and OFFSET is 0 and memory if it isn't. */
7445 if (CONSTANT_P (op0
))
7447 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (tem
));
7448 if (mode
!= BLKmode
&& LEGITIMATE_CONSTANT_P (op0
)
7450 op0
= force_reg (mode
, op0
);
7452 op0
= validize_mem (force_const_mem (mode
, op0
));
/* Otherwise, if this object is not in memory and we either have an
   offset or a BLKmode result, put it there.  This case can't occur in
   C, but can in Ada if we have unchecked conversion of an expression
   from a scalar type to an array or record type or for an
   ARRAY_RANGE_REF whose type is BLKmode.  */
7460 else if (GET_CODE (op0
) != MEM
7462 || (code
== ARRAY_RANGE_REF
&& mode
== BLKmode
)))
7464 /* If the operand is a SAVE_EXPR, we can deal with this by
7465 forcing the SAVE_EXPR into memory. */
7466 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == SAVE_EXPR
)
7468 put_var_into_stack (TREE_OPERAND (exp
, 0),
7470 op0
= SAVE_EXPR_RTL (TREE_OPERAND (exp
, 0));
7475 = build_qualified_type (TREE_TYPE (tem
),
7476 (TYPE_QUALS (TREE_TYPE (tem
))
7477 | TYPE_QUAL_CONST
));
7478 rtx memloc
= assign_temp (nt
, 1, 1, 1);
7480 emit_move_insn (memloc
, op0
);
7487 rtx offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
,
7490 if (GET_CODE (op0
) != MEM
)
7493 #ifdef POINTERS_EXTEND_UNSIGNED
7494 if (GET_MODE (offset_rtx
) != Pmode
)
7495 offset_rtx
= convert_to_mode (Pmode
, offset_rtx
, 0);
7497 if (GET_MODE (offset_rtx
) != ptr_mode
)
7498 offset_rtx
= convert_to_mode (ptr_mode
, offset_rtx
, 0);
7501 /* A constant address in OP0 can have VOIDmode, we must not try
7502 to call force_reg for that case. Avoid that case. */
7503 if (GET_CODE (op0
) == MEM
7504 && GET_MODE (op0
) == BLKmode
7505 && GET_MODE (XEXP (op0
, 0)) != VOIDmode
7507 && (bitpos
% bitsize
) == 0
7508 && (bitsize
% GET_MODE_ALIGNMENT (mode1
)) == 0
7509 && MEM_ALIGN (op0
) == GET_MODE_ALIGNMENT (mode1
))
7511 op0
= adjust_address (op0
, mode1
, bitpos
/ BITS_PER_UNIT
);
7515 op0
= offset_address (op0
, offset_rtx
,
7516 highest_pow2_factor (offset
));
7519 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7520 record its alignment as BIGGEST_ALIGNMENT. */
7521 if (GET_CODE (op0
) == MEM
&& bitpos
== 0 && offset
!= 0
7522 && is_aligning_offset (offset
, tem
))
7523 set_mem_align (op0
, BIGGEST_ALIGNMENT
);
7525 /* Don't forget about volatility even if this is a bitfield. */
7526 if (GET_CODE (op0
) == MEM
&& volatilep
&& ! MEM_VOLATILE_P (op0
))
7528 if (op0
== orig_op0
)
7529 op0
= copy_rtx (op0
);
7531 MEM_VOLATILE_P (op0
) = 1;
7534 /* The following code doesn't handle CONCAT.
7535 Assume only bitpos == 0 can be used for CONCAT, due to
7536 one element arrays having the same mode as its element. */
7537 if (GET_CODE (op0
) == CONCAT
)
7539 if (bitpos
!= 0 || bitsize
!= GET_MODE_BITSIZE (GET_MODE (op0
)))
7544 /* In cases where an aligned union has an unaligned object
7545 as a field, we might be extracting a BLKmode value from
7546 an integer-mode (e.g., SImode) object. Handle this case
7547 by doing the extract into an object as wide as the field
7548 (which we know to be the width of a basic mode), then
7549 storing into memory, and changing the mode to BLKmode. */
7550 if (mode1
== VOIDmode
7551 || GET_CODE (op0
) == REG
|| GET_CODE (op0
) == SUBREG
7552 || (mode1
!= BLKmode
&& ! direct_load
[(int) mode1
]
7553 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_INT
7554 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_FLOAT
7555 && modifier
!= EXPAND_CONST_ADDRESS
7556 && modifier
!= EXPAND_INITIALIZER
)
7557 /* If the field isn't aligned enough to fetch as a memref,
7558 fetch it as a bit field. */
7559 || (mode1
!= BLKmode
7560 && (((TYPE_ALIGN (TREE_TYPE (tem
)) < GET_MODE_ALIGNMENT (mode
)
7561 || (bitpos
% GET_MODE_ALIGNMENT (mode
) != 0))
7562 && ((modifier
== EXPAND_CONST_ADDRESS
7563 || modifier
== EXPAND_INITIALIZER
)
7565 : SLOW_UNALIGNED_ACCESS (mode1
, MEM_ALIGN (op0
))))
7566 || (bitpos
% BITS_PER_UNIT
!= 0)))
7567 /* If the type and the field are a constant size and the
7568 size of the type isn't the same size as the bitfield,
7569 we must use bitfield operations. */
7571 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
)))
7573 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp
)),
7576 enum machine_mode ext_mode
= mode
;
7578 if (ext_mode
== BLKmode
7579 && ! (target
!= 0 && GET_CODE (op0
) == MEM
7580 && GET_CODE (target
) == MEM
7581 && bitpos
% BITS_PER_UNIT
== 0))
7582 ext_mode
= mode_for_size (bitsize
, MODE_INT
, 1);
7584 if (ext_mode
== BLKmode
)
7586 /* In this case, BITPOS must start at a byte boundary and
7587 TARGET, if specified, must be a MEM. */
7588 if (GET_CODE (op0
) != MEM
7589 || (target
!= 0 && GET_CODE (target
) != MEM
)
7590 || bitpos
% BITS_PER_UNIT
!= 0)
7593 op0
= adjust_address (op0
, VOIDmode
, bitpos
/ BITS_PER_UNIT
);
7595 target
= assign_temp (type
, 0, 1, 1);
7597 emit_block_move (target
, op0
,
7598 GEN_INT ((bitsize
+ BITS_PER_UNIT
- 1)
7600 (modifier
== EXPAND_STACK_PARM
7601 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
7606 op0
= validize_mem (op0
);
7608 if (GET_CODE (op0
) == MEM
&& GET_CODE (XEXP (op0
, 0)) == REG
)
7609 mark_reg_pointer (XEXP (op0
, 0), MEM_ALIGN (op0
));
7611 op0
= extract_bit_field (op0
, bitsize
, bitpos
, unsignedp
,
7612 (modifier
== EXPAND_STACK_PARM
7613 ? NULL_RTX
: target
),
7615 int_size_in_bytes (TREE_TYPE (tem
)));
7617 /* If the result is a record type and BITSIZE is narrower than
7618 the mode of OP0, an integral mode, and this is a big endian
7619 machine, we must put the field into the high-order bits. */
7620 if (TREE_CODE (type
) == RECORD_TYPE
&& BYTES_BIG_ENDIAN
7621 && GET_MODE_CLASS (GET_MODE (op0
)) == MODE_INT
7622 && bitsize
< (HOST_WIDE_INT
) GET_MODE_BITSIZE (GET_MODE (op0
)))
7623 op0
= expand_shift (LSHIFT_EXPR
, GET_MODE (op0
), op0
,
7624 size_int (GET_MODE_BITSIZE (GET_MODE (op0
))
7628 if (mode
== BLKmode
)
7630 rtx
new = assign_temp (build_qualified_type
7631 ((*lang_hooks
.types
.type_for_mode
)
7633 TYPE_QUAL_CONST
), 0, 1, 1);
7635 emit_move_insn (new, op0
);
7636 op0
= copy_rtx (new);
7637 PUT_MODE (op0
, BLKmode
);
7638 set_mem_attributes (op0
, exp
, 1);
/* If the result is BLKmode, use that to access the object
   now as well.  */
7646 if (mode
== BLKmode
)
7649 /* Get a reference to just this component. */
7650 if (modifier
== EXPAND_CONST_ADDRESS
7651 || modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
7652 op0
= adjust_address_nv (op0
, mode1
, bitpos
/ BITS_PER_UNIT
);
7654 op0
= adjust_address (op0
, mode1
, bitpos
/ BITS_PER_UNIT
);
7656 if (op0
== orig_op0
)
7657 op0
= copy_rtx (op0
);
7659 set_mem_attributes (op0
, exp
, 0);
7660 if (GET_CODE (XEXP (op0
, 0)) == REG
)
7661 mark_reg_pointer (XEXP (op0
, 0), MEM_ALIGN (op0
));
7663 MEM_VOLATILE_P (op0
) |= volatilep
;
7664 if (mode
== mode1
|| mode1
== BLKmode
|| mode1
== tmode
7665 || modifier
== EXPAND_CONST_ADDRESS
7666 || modifier
== EXPAND_INITIALIZER
)
7668 else if (target
== 0)
7669 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
7671 convert_move (target
, op0
, unsignedp
);
7677 rtx insn
, before
= get_last_insn (), vtbl_ref
;
7679 /* Evaluate the interior expression. */
7680 subtarget
= expand_expr (TREE_OPERAND (exp
, 0), target
,
7683 /* Get or create an instruction off which to hang a note. */
7684 if (REG_P (subtarget
))
7687 insn
= get_last_insn ();
7690 if (! INSN_P (insn
))
7691 insn
= prev_nonnote_insn (insn
);
7695 target
= gen_reg_rtx (GET_MODE (subtarget
));
7696 insn
= emit_move_insn (target
, subtarget
);
7699 /* Collect the data for the note. */
7700 vtbl_ref
= XEXP (DECL_RTL (TREE_OPERAND (exp
, 1)), 0);
7701 vtbl_ref
= plus_constant (vtbl_ref
,
7702 tree_low_cst (TREE_OPERAND (exp
, 2), 0));
7703 /* Discard the initial CONST that was added. */
7704 vtbl_ref
= XEXP (vtbl_ref
, 0);
7707 = gen_rtx_EXPR_LIST (REG_VTABLE_REF
, vtbl_ref
, REG_NOTES (insn
));
7712 /* Intended for a reference to a buffer of a file-object in Pascal.
7713 But it's not certain that a special tree code will really be
7714 necessary for these. INDIRECT_REF might work for them. */
/* Pascal set IN expression.

   Algorithm:
       rlo       = set_low - (set_low%bits_per_word);
       the_word  = set [ (index - rlo)/bits_per_word ];
       bit_index = index % bits_per_word;
       bitmask   = 1 << bit_index;
       return !!(the_word & bitmask);  */
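/* A small worked instance of the scheme above (illustrative values only,
   assuming bits_per_word == 8): with set_low == 0 and index == 13, rlo is 0,
   the_word is set[13/8] == set[1], bit_index is 13 % 8 == 5, and bitmask is
   1 << 5 == 0x20, so the membership test reads bit 5 of byte 1 of the set.  */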
7729 tree set
= TREE_OPERAND (exp
, 0);
7730 tree index
= TREE_OPERAND (exp
, 1);
7731 int iunsignedp
= TREE_UNSIGNED (TREE_TYPE (index
));
7732 tree set_type
= TREE_TYPE (set
);
7733 tree set_low_bound
= TYPE_MIN_VALUE (TYPE_DOMAIN (set_type
));
7734 tree set_high_bound
= TYPE_MAX_VALUE (TYPE_DOMAIN (set_type
));
7735 rtx index_val
= expand_expr (index
, 0, VOIDmode
, 0);
7736 rtx lo_r
= expand_expr (set_low_bound
, 0, VOIDmode
, 0);
7737 rtx hi_r
= expand_expr (set_high_bound
, 0, VOIDmode
, 0);
7738 rtx setval
= expand_expr (set
, 0, VOIDmode
, 0);
7739 rtx setaddr
= XEXP (setval
, 0);
7740 enum machine_mode index_mode
= TYPE_MODE (TREE_TYPE (index
));
7742 rtx diff
, quo
, rem
, addr
, bit
, result
;
7744 /* If domain is empty, answer is no. Likewise if index is constant
7745 and out of bounds. */
7746 if (((TREE_CODE (set_high_bound
) == INTEGER_CST
7747 && TREE_CODE (set_low_bound
) == INTEGER_CST
7748 && tree_int_cst_lt (set_high_bound
, set_low_bound
))
7749 || (TREE_CODE (index
) == INTEGER_CST
7750 && TREE_CODE (set_low_bound
) == INTEGER_CST
7751 && tree_int_cst_lt (index
, set_low_bound
))
7752 || (TREE_CODE (set_high_bound
) == INTEGER_CST
7753 && TREE_CODE (index
) == INTEGER_CST
7754 && tree_int_cst_lt (set_high_bound
, index
))))
7758 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
7760 /* If we get here, we have to generate the code for both cases
7761 (in range and out of range). */
7763 op0
= gen_label_rtx ();
7764 op1
= gen_label_rtx ();
7766 if (! (GET_CODE (index_val
) == CONST_INT
7767 && GET_CODE (lo_r
) == CONST_INT
))
7768 emit_cmp_and_jump_insns (index_val
, lo_r
, LT
, NULL_RTX
,
7769 GET_MODE (index_val
), iunsignedp
, op1
);
7771 if (! (GET_CODE (index_val
) == CONST_INT
7772 && GET_CODE (hi_r
) == CONST_INT
))
7773 emit_cmp_and_jump_insns (index_val
, hi_r
, GT
, NULL_RTX
,
7774 GET_MODE (index_val
), iunsignedp
, op1
);
/* Calculate the element number of bit zero in the first word
   of the set.  */
7778 if (GET_CODE (lo_r
) == CONST_INT
)
7779 rlow
= GEN_INT (INTVAL (lo_r
)
7780 & ~((HOST_WIDE_INT
) 1 << BITS_PER_UNIT
));
7782 rlow
= expand_binop (index_mode
, and_optab
, lo_r
,
7783 GEN_INT (~((HOST_WIDE_INT
) 1 << BITS_PER_UNIT
)),
7784 NULL_RTX
, iunsignedp
, OPTAB_LIB_WIDEN
);
7786 diff
= expand_binop (index_mode
, sub_optab
, index_val
, rlow
,
7787 NULL_RTX
, iunsignedp
, OPTAB_LIB_WIDEN
);
7789 quo
= expand_divmod (0, TRUNC_DIV_EXPR
, index_mode
, diff
,
7790 GEN_INT (BITS_PER_UNIT
), NULL_RTX
, iunsignedp
);
7791 rem
= expand_divmod (1, TRUNC_MOD_EXPR
, index_mode
, index_val
,
7792 GEN_INT (BITS_PER_UNIT
), NULL_RTX
, iunsignedp
);
7794 addr
= memory_address (byte_mode
,
7795 expand_binop (index_mode
, add_optab
, diff
,
7796 setaddr
, NULL_RTX
, iunsignedp
,
7799 /* Extract the bit we want to examine. */
7800 bit
= expand_shift (RSHIFT_EXPR
, byte_mode
,
7801 gen_rtx_MEM (byte_mode
, addr
),
7802 make_tree (TREE_TYPE (index
), rem
),
7804 result
= expand_binop (byte_mode
, and_optab
, bit
, const1_rtx
,
7805 GET_MODE (target
) == byte_mode
? target
: 0,
7806 1, OPTAB_LIB_WIDEN
);
7808 if (result
!= target
)
7809 convert_move (target
, result
, 1);
7811 /* Output the code to handle the out-of-range case. */
7814 emit_move_insn (target
, const0_rtx
);
7819 case WITH_CLEANUP_EXPR
:
7820 if (WITH_CLEANUP_EXPR_RTL (exp
) == 0)
7822 WITH_CLEANUP_EXPR_RTL (exp
)
7823 = expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
, modifier
);
7824 expand_decl_cleanup_eh (NULL_TREE
, TREE_OPERAND (exp
, 1),
7825 CLEANUP_EH_ONLY (exp
));
7827 /* That's it for this cleanup. */
7828 TREE_OPERAND (exp
, 1) = 0;
7830 return WITH_CLEANUP_EXPR_RTL (exp
);
7832 case CLEANUP_POINT_EXPR
:
7834 /* Start a new binding layer that will keep track of all cleanup
7835 actions to be performed. */
7836 expand_start_bindings (2);
7838 target_temp_slot_level
= temp_slot_level
;
7840 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
, modifier
);
7841 /* If we're going to use this value, load it up now. */
7843 op0
= force_not_mem (op0
);
7844 preserve_temp_slots (op0
);
7845 expand_end_bindings (NULL_TREE
, 0, 0);
7850 /* Check for a built-in function. */
7851 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ADDR_EXPR
7852 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))
7854 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
7856 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))
7857 == BUILT_IN_FRONTEND
)
7858 return (*lang_hooks
.expand_expr
) (exp
, original_target
,
7861 return expand_builtin (exp
, target
, subtarget
, tmode
, ignore
);
7864 return expand_call (exp
, target
, ignore
);
7866 case NON_LVALUE_EXPR
:
7869 case REFERENCE_EXPR
:
7870 if (TREE_OPERAND (exp
, 0) == error_mark_node
)
7873 if (TREE_CODE (type
) == UNION_TYPE
)
7875 tree valtype
= TREE_TYPE (TREE_OPERAND (exp
, 0));
7877 /* If both input and output are BLKmode, this conversion isn't doing
7878 anything except possibly changing memory attribute. */
7879 if (mode
== BLKmode
&& TYPE_MODE (valtype
) == BLKmode
)
7881 rtx result
= expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
,
7884 result
= copy_rtx (result
);
7885 set_mem_attributes (result
, exp
, 0);
7890 target
= assign_temp (type
, 0, 1, 1);
7892 if (GET_CODE (target
) == MEM
)
7893 /* Store data into beginning of memory target. */
7894 store_expr (TREE_OPERAND (exp
, 0),
7895 adjust_address (target
, TYPE_MODE (valtype
), 0),
7896 modifier
== EXPAND_STACK_PARM
? 2 : 0);
7898 else if (GET_CODE (target
) == REG
)
7899 /* Store this field into a union of the proper type. */
7900 store_field (target
,
7901 MIN ((int_size_in_bytes (TREE_TYPE
7902 (TREE_OPERAND (exp
, 0)))
7904 (HOST_WIDE_INT
) GET_MODE_BITSIZE (mode
)),
7905 0, TYPE_MODE (valtype
), TREE_OPERAND (exp
, 0),
7906 VOIDmode
, 0, type
, 0);
7910 /* Return the entire union. */
7914 if (mode
== TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))
7916 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, VOIDmode
,
7919 /* If the signedness of the conversion differs and OP0 is
7920 a promoted SUBREG, clear that indication since we now
7921 have to do the proper extension. */
7922 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))) != unsignedp
7923 && GET_CODE (op0
) == SUBREG
)
7924 SUBREG_PROMOTED_VAR_P (op0
) = 0;
7929 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, mode
, modifier
);
7930 if (GET_MODE (op0
) == mode
)
7933 /* If OP0 is a constant, just convert it into the proper mode. */
7934 if (CONSTANT_P (op0
))
7936 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
7937 enum machine_mode inner_mode
= TYPE_MODE (inner_type
);
7939 if (modifier
== EXPAND_INITIALIZER
)
7940 return simplify_gen_subreg (mode
, op0
, inner_mode
,
7941 subreg_lowpart_offset (mode
,
7944 return convert_modes (mode
, inner_mode
, op0
,
7945 TREE_UNSIGNED (inner_type
));
7948 if (modifier
== EXPAND_INITIALIZER
)
7949 return gen_rtx_fmt_e (unsignedp
? ZERO_EXTEND
: SIGN_EXTEND
, mode
, op0
);
7953 convert_to_mode (mode
, op0
,
7954 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
7956 convert_move (target
, op0
,
7957 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
7960 case VIEW_CONVERT_EXPR
:
7961 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, mode
, modifier
);
7963 /* If the input and output modes are both the same, we are done.
7964 Otherwise, if neither mode is BLKmode and both are integral and within
7965 a word, we can use gen_lowpart. If neither is true, make sure the
7966 operand is in memory and convert the MEM to the new mode. */
7967 if (TYPE_MODE (type
) == GET_MODE (op0
))
7969 else if (TYPE_MODE (type
) != BLKmode
&& GET_MODE (op0
) != BLKmode
7970 && GET_MODE_CLASS (GET_MODE (op0
)) == MODE_INT
7971 && GET_MODE_CLASS (TYPE_MODE (type
)) == MODE_INT
7972 && GET_MODE_SIZE (TYPE_MODE (type
)) <= UNITS_PER_WORD
7973 && GET_MODE_SIZE (GET_MODE (op0
)) <= UNITS_PER_WORD
)
7974 op0
= gen_lowpart (TYPE_MODE (type
), op0
);
7975 else if (GET_CODE (op0
) != MEM
)
/* If the operand is not a MEM, force it into memory.  Since we
   are going to be changing the mode of the MEM, don't call
   force_const_mem for constants because we don't allow pool
   constants to change mode.  */
7981 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
7983 if (TREE_ADDRESSABLE (exp
))
7986 if (target
== 0 || GET_MODE (target
) != TYPE_MODE (inner_type
))
7988 = assign_stack_temp_for_type
7989 (TYPE_MODE (inner_type
),
7990 GET_MODE_SIZE (TYPE_MODE (inner_type
)), 0, inner_type
);
7992 emit_move_insn (target
, op0
);
/* At this point, OP0 is in the correct mode.  If the output type is such
   that the operand is known to be aligned, indicate that it is.
   Otherwise, we need only be concerned about alignment for non-BLKmode
   results.  */
8000 if (GET_CODE (op0
) == MEM
)
8002 op0
= copy_rtx (op0
);
8004 if (TYPE_ALIGN_OK (type
))
8005 set_mem_align (op0
, MAX (MEM_ALIGN (op0
), TYPE_ALIGN (type
)));
8006 else if (TYPE_MODE (type
) != BLKmode
&& STRICT_ALIGNMENT
8007 && MEM_ALIGN (op0
) < GET_MODE_ALIGNMENT (TYPE_MODE (type
)))
8009 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
8010 HOST_WIDE_INT temp_size
8011 = MAX (int_size_in_bytes (inner_type
),
8012 (HOST_WIDE_INT
) GET_MODE_SIZE (TYPE_MODE (type
)));
8013 rtx
new = assign_stack_temp_for_type (TYPE_MODE (type
),
8014 temp_size
, 0, type
);
8015 rtx new_with_op0_mode
= adjust_address (new, GET_MODE (op0
), 0);
8017 if (TREE_ADDRESSABLE (exp
))
8020 if (GET_MODE (op0
) == BLKmode
)
8021 emit_block_move (new_with_op0_mode
, op0
,
8022 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type
))),
8023 (modifier
== EXPAND_STACK_PARM
8024 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
8026 emit_move_insn (new_with_op0_mode
, op0
);
8031 op0
= adjust_address (op0
, TYPE_MODE (type
), 0);
8037 this_optab
= ! unsignedp
&& flag_trapv
8038 && (GET_MODE_CLASS (mode
) == MODE_INT
)
8039 ? addv_optab
: add_optab
;
8041 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
8042 something else, make sure we add the register to the constant and
8043 then to the other thing. This case can occur during strength
8044 reduction and doing it this way will produce better code if the
8045 frame pointer or argument pointer is eliminated.
8047 fold-const.c will ensure that the constant is always in the inner
8048 PLUS_EXPR, so the only case we need to do anything about is if
8049 sp, ap, or fp is our second argument, in which case we must swap
8050 the innermost first argument and our second argument. */
8052 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == PLUS_EXPR
8053 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 1)) == INTEGER_CST
8054 && TREE_CODE (TREE_OPERAND (exp
, 1)) == RTL_EXPR
8055 && (RTL_EXPR_RTL (TREE_OPERAND (exp
, 1)) == frame_pointer_rtx
8056 || RTL_EXPR_RTL (TREE_OPERAND (exp
, 1)) == stack_pointer_rtx
8057 || RTL_EXPR_RTL (TREE_OPERAND (exp
, 1)) == arg_pointer_rtx
))
8059 tree t
= TREE_OPERAND (exp
, 1);
8061 TREE_OPERAND (exp
, 1) = TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
8062 TREE_OPERAND (TREE_OPERAND (exp
, 0), 0) = t
;
      /* If the result is to be ptr_mode and we are adding an integer to
	 something, we might be forming a constant.  So try to use
	 plus_constant.  If it produces a sum and we can't accept it,
	 use force_operand.  This allows P = &ARR[const] to generate
	 efficient code on machines where a SYMBOL_REF is not a valid
	 address.

	 If this is an EXPAND_SUM call, always return the sum.  */
8073 if (modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
8074 || (mode
== ptr_mode
&& (unsignedp
|| ! flag_trapv
)))
8076 if (modifier
== EXPAND_STACK_PARM
)
8078 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == INTEGER_CST
8079 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
8080 && TREE_CONSTANT (TREE_OPERAND (exp
, 1)))
8084 op1
= expand_expr (TREE_OPERAND (exp
, 1), subtarget
, VOIDmode
,
8086 /* Use immed_double_const to ensure that the constant is
8087 truncated according to the mode of OP1, then sign extended
8088 to a HOST_WIDE_INT. Using the constant directly can result
8089 in non-canonical RTL in a 64x32 cross compile. */
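      /* Editor's note: a hedged illustration of the canonicalization the
	 comment above describes.  On a 64-bit host targeting a narrower
	 mode, a constant must look as if it had been computed in the target
	 mode: keep only the mode's low-order bits, then sign-extend back to
	 the host-wide integer.  Plain C sketch, not the GCC routine.  */
#if 0
static long long
truncate_and_sign_extend (long long value, int mode_bits)
{
  /* Keep only MODE_BITS low-order bits ...  */
  unsigned long long mask = (mode_bits < 64
			     ? (1ULL << mode_bits) - 1
			     : ~0ULL);
  unsigned long long low = (unsigned long long) value & mask;
  /* ... then sign-extend from bit MODE_BITS - 1.  */
  if (mode_bits > 0 && mode_bits < 64 && (low & (1ULL << (mode_bits - 1))))
    low |= ~mask;
  return (long long) low;
}
#endif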
8091 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 0)),
8093 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 1))));
8094 op1
= plus_constant (op1
, INTVAL (constant_part
));
8095 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
8096 op1
= force_operand (op1
, target
);
8100 else if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
8101 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_INT
8102 && TREE_CONSTANT (TREE_OPERAND (exp
, 0)))
8106 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
,
8107 (modifier
== EXPAND_INITIALIZER
8108 ? EXPAND_INITIALIZER
: EXPAND_SUM
));
8109 if (! CONSTANT_P (op0
))
8111 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
,
8112 VOIDmode
, modifier
);
8113 /* Return a PLUS if modifier says it's OK. */
8114 if (modifier
== EXPAND_SUM
8115 || modifier
== EXPAND_INITIALIZER
)
8116 return simplify_gen_binary (PLUS
, mode
, op0
, op1
);
8119 /* Use immed_double_const to ensure that the constant is
8120 truncated according to the mode of OP1, then sign extended
8121 to a HOST_WIDE_INT. Using the constant directly can result
8122 in non-canonical RTL in a 64x32 cross compile. */
8124 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1)),
8126 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))));
8127 op0
= plus_constant (op0
, INTVAL (constant_part
));
8128 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
8129 op0
= force_operand (op0
, target
);
8134 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1), 1))
      /* No sense saving up arithmetic to be done
	 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
8141 if ((modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
8142 || mode
!= ptr_mode
)
8144 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
8145 if (! operand_equal_p (TREE_OPERAND (exp
, 0),
8146 TREE_OPERAND (exp
, 1), 0))
8147 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
8150 if (op0
== const0_rtx
)
8152 if (op1
== const0_rtx
)
8157 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, modifier
);
8158 if (! operand_equal_p (TREE_OPERAND (exp
, 0),
8159 TREE_OPERAND (exp
, 1), 0))
8160 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
,
8161 VOIDmode
, modifier
);
8164 return simplify_gen_binary (PLUS
, mode
, op0
, op1
);
      /* For initializers, we are allowed to return a MINUS of two
	 symbolic constants.  Here we handle all cases when both operands
	 are constant.  */
8170 /* Handle difference of two symbolic constants,
8171 for the sake of an initializer. */
8172 if ((modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
8173 && really_constant_p (TREE_OPERAND (exp
, 0))
8174 && really_constant_p (TREE_OPERAND (exp
, 1)))
8176 rtx op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, VOIDmode
,
8178 rtx op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
,
8181 /* If the last operand is a CONST_INT, use plus_constant of
8182 the negated constant. Else make the MINUS. */
8183 if (GET_CODE (op1
) == CONST_INT
)
8184 return plus_constant (op0
, - INTVAL (op1
));
8186 return gen_rtx_MINUS (mode
, op0
, op1
);
8189 this_optab
= ! unsignedp
&& flag_trapv
8190 && (GET_MODE_CLASS(mode
) == MODE_INT
)
8191 ? subv_optab
: sub_optab
;
      /* No sense saving up arithmetic to be done
	 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
8197 if ((modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
8198 || mode
!= ptr_mode
)
8201 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1), 1))
8204 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, modifier
);
8205 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, modifier
);
8207 /* Convert A - const to A + (-const). */
8208 if (GET_CODE (op1
) == CONST_INT
)
8210 op1
= negate_rtx (mode
, op1
);
8211 return simplify_gen_binary (PLUS
, mode
, op0
, op1
);
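      /* Editor's note: the rewrite above in source terms -- subtracting a
	 constant becomes adding its negation, so later passes only ever see
	 PLUS when forming addresses.  Stand-alone hedged sketch, not GCC
	 code; ignores the usual signed-overflow caveats.  */
#if 0
static long
sub_as_add (long a, long c)
{
  /* a - c and a + (-c) are the same value; the second form folds
     naturally into base + offset addressing.  */
  return a + (-c);
}
#endif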
8217 /* If first operand is constant, swap them.
8218 Thus the following special case checks need only
8219 check the second operand. */
8220 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == INTEGER_CST
)
8222 tree t1
= TREE_OPERAND (exp
, 0);
8223 TREE_OPERAND (exp
, 0) = TREE_OPERAND (exp
, 1);
8224 TREE_OPERAND (exp
, 1) = t1
;
8227 /* Attempt to return something suitable for generating an
8228 indexed address, for machines that support that. */
8230 if (modifier
== EXPAND_SUM
&& mode
== ptr_mode
8231 && host_integerp (TREE_OPERAND (exp
, 1), 0))
8233 tree exp1
= TREE_OPERAND (exp
, 1);
8235 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
,
8238 /* If we knew for certain that this is arithmetic for an array
8239 reference, and we knew the bounds of the array, then we could
8240 apply the distributive law across (PLUS X C) for constant C.
8241 Without such knowledge, we risk overflowing the computation
8242 when both X and C are large, but X+C isn't. */
8243 /* ??? Could perhaps special-case EXP being unsigned and C being
8244 positive. In that case we are certain that X+C is no smaller
8245 than X and so the transformed expression will overflow iff the
8246 original would have. */
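	  /* Editor's note: a concrete instance of the overflow hazard noted
	     above, as a hedged stand-alone sketch.  Distributing S over
	     (X + C) can overflow in the X * S term even though (X + C) * S
	     stays comfortably in range.  */
#if 0
#include <stdint.h>
#include <stdio.h>

static void
distributive_law_hazard (void)
{
  int32_t x = 1 << 30;		/* large positive X */
  int32_t c = -(1 << 30) + 5;	/* large negative C; X + C is just 5 */
  int32_t s = 8;
  int32_t folded = (x + c) * s;		/* (X + C) * S == 40, in range */
  int64_t x_term = (int64_t) x * s;	/* X * S == 2^33, overflows int32_t */
  printf ("folded=%lld, x*s=%lld\n", (long long) folded, (long long) x_term);
}
#endif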
8248 if (GET_CODE (op0
) != REG
)
8249 op0
= force_operand (op0
, NULL_RTX
);
8250 if (GET_CODE (op0
) != REG
)
8251 op0
= copy_to_mode_reg (mode
, op0
);
8253 return gen_rtx_MULT (mode
, op0
,
8254 gen_int_mode (tree_low_cst (exp1
, 0),
8255 TYPE_MODE (TREE_TYPE (exp1
))));
8258 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1), 1))
8261 if (modifier
== EXPAND_STACK_PARM
)
8264 /* Check for multiplying things that have been extended
8265 from a narrower type. If this machine supports multiplying
8266 in that narrower type with a result in the desired type,
8267 do it that way, and avoid the explicit type-conversion. */
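      /* Editor's note: the source-level shape the check above looks for.
	 Both operands were widened from a narrower type, so the full-width
	 product can be produced by a single widening multiply on the narrow
	 operands instead of converting first.  Hedged stand-alone sketch.  */
#if 0
static long long
widening_multiply (int a, int b)
{
  /* A target with a 32x32->64 multiply can do this in one instruction
     rather than extending both operands and doing a 64x64 multiply.  */
  return (long long) a * (long long) b;
}
#endif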
8268 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == NOP_EXPR
8269 && TREE_CODE (type
) == INTEGER_TYPE
8270 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
8271 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp
, 0))))
8272 && ((TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
8273 && int_fits_type_p (TREE_OPERAND (exp
, 1),
8274 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
8275 /* Don't use a widening multiply if a shift will do. */
8276 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 1))))
8277 > HOST_BITS_PER_WIDE_INT
)
8278 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1))) < 0))
8280 (TREE_CODE (TREE_OPERAND (exp
, 1)) == NOP_EXPR
8281 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0)))
8283 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))))
8284 /* If both operands are extended, they must either both
8285 be zero-extended or both be sign-extended. */
8286 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0)))
8288 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))))))
8290 enum machine_mode innermode
8291 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)));
8292 optab other_optab
= (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
8293 ? smul_widen_optab
: umul_widen_optab
);
8294 this_optab
= (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
8295 ? umul_widen_optab
: smul_widen_optab
);
8296 if (mode
== GET_MODE_WIDER_MODE (innermode
))
8298 if (this_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
)
8300 op0
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
8301 NULL_RTX
, VOIDmode
, 0);
8302 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
)
8303 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
,
8306 op1
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0),
8307 NULL_RTX
, VOIDmode
, 0);
8310 else if (other_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
8311 && innermode
== word_mode
)
8314 op0
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
8315 NULL_RTX
, VOIDmode
, 0);
8316 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
)
8317 op1
= convert_modes (innermode
, mode
,
8318 expand_expr (TREE_OPERAND (exp
, 1),
8319 NULL_RTX
, VOIDmode
, 0),
8322 op1
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0),
8323 NULL_RTX
, VOIDmode
, 0);
8324 temp
= expand_binop (mode
, other_optab
, op0
, op1
, target
,
8325 unsignedp
, OPTAB_LIB_WIDEN
);
8326 htem
= expand_mult_highpart_adjust (innermode
,
8327 gen_highpart (innermode
, temp
),
8329 gen_highpart (innermode
, temp
),
8331 emit_move_insn (gen_highpart (innermode
, temp
), htem
);
8336 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
8337 if (! operand_equal_p (TREE_OPERAND (exp
, 0),
8338 TREE_OPERAND (exp
, 1), 0))
8339 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
8342 return expand_mult (mode
, op0
, op1
, target
, unsignedp
);
    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
8349 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1), 1))
8351 if (modifier
== EXPAND_STACK_PARM
)
8353 /* Possible optimization: compute the dividend with EXPAND_SUM
8354 then if the divisor is constant can optimize the case
8355 where some terms of the dividend have coeffs divisible by it. */
8356 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
8357 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
8358 return expand_divmod (0, code
, mode
, op0
, op1
, target
, unsignedp
);
      /* Emit a/b as a*(1/b).  Later passes may be able to CSE the reciprocal,
	 saving an expensive divide.  If not, combine will rebuild the original
	 computation.  */
8364 if (flag_unsafe_math_optimizations
&& optimize
&& !optimize_size
8365 && TREE_CODE (type
) == REAL_TYPE
8366 && !real_onep (TREE_OPERAND (exp
, 0)))
8367 return expand_expr (build (MULT_EXPR
, type
, TREE_OPERAND (exp
, 0),
8368 build (RDIV_EXPR
, type
,
8369 build_real (type
, dconst1
),
8370 TREE_OPERAND (exp
, 1))),
8371 target
, tmode
, modifier
);
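      /* Editor's note: the unsafe-math rewrite above, spelled out in C as a
	 hedged sketch.  It is valid only under -funsafe-math-optimizations,
	 since the reciprocal form can round differently; the payoff is that
	 a loop-invariant 1.0/b can be CSEd or hoisted, turning repeated
	 divides into multiplies.  */
#if 0
static void
scale_by_reciprocal (double *a, double b, int n)
{
  double recip = 1.0 / b;	/* one divide ...  */
  int i;
  for (i = 0; i < n; i++)
    a[i] = a[i] * recip;	/* ... then only multiplies */
}
#endif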
8372 this_optab
= sdiv_optab
;
    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
8379 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1), 1))
8381 if (modifier
== EXPAND_STACK_PARM
)
8383 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
8384 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
8385 return expand_divmod (1, code
, mode
, op0
, op1
, target
, unsignedp
);
    case FIX_ROUND_EXPR:
    case FIX_FLOOR_EXPR:
      abort ();			/* Not used for C.  */

    case FIX_TRUNC_EXPR:
8393 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, VOIDmode
, 0);
8394 if (target
== 0 || modifier
== EXPAND_STACK_PARM
)
8395 target
= gen_reg_rtx (mode
);
8396 expand_fix (target
, op0
, unsignedp
);
8400 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, VOIDmode
, 0);
8401 if (target
== 0 || modifier
== EXPAND_STACK_PARM
)
8402 target
= gen_reg_rtx (mode
);
8403 /* expand_float can't figure out what to do if FROM has VOIDmode.
8404 So give it the correct mode. With -O, cse will optimize this. */
8405 if (GET_MODE (op0
) == VOIDmode
)
8406 op0
= copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))),
8408 expand_float (target
, op0
,
8409 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
8413 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
8414 if (modifier
== EXPAND_STACK_PARM
)
8416 temp
= expand_unop (mode
,
8417 ! unsignedp
&& flag_trapv
8418 && (GET_MODE_CLASS(mode
) == MODE_INT
)
8419 ? negv_optab
: neg_optab
, op0
, target
, 0);
8425 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
8426 if (modifier
== EXPAND_STACK_PARM
)
8429 /* ABS_EXPR is not valid for complex arguments. */
8430 if (GET_MODE_CLASS (mode
) == MODE_COMPLEX_INT
8431 || GET_MODE_CLASS (mode
) == MODE_COMPLEX_FLOAT
)
8434 /* Unsigned abs is simply the operand. Testing here means we don't
8435 risk generating incorrect code below. */
8436 if (TREE_UNSIGNED (type
))
8439 return expand_abs (mode
, op0
, target
, unsignedp
,
8440 safe_from_p (target
, TREE_OPERAND (exp
, 0), 1));
8444 target
= original_target
;
8446 || modifier
== EXPAND_STACK_PARM
8447 || ! safe_from_p (target
, TREE_OPERAND (exp
, 1), 1)
8448 || (GET_CODE (target
) == MEM
&& MEM_VOLATILE_P (target
))
8449 || GET_MODE (target
) != mode
8450 || (GET_CODE (target
) == REG
8451 && REGNO (target
) < FIRST_PSEUDO_REGISTER
))
8452 target
= gen_reg_rtx (mode
);
8453 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
8454 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, VOIDmode
, 0);
      /* First try to do it with a special MIN or MAX instruction.
	 If that does not win, use a conditional jump to select the proper
	 value.  */
8459 this_optab
= (TREE_UNSIGNED (type
)
8460 ? (code
== MIN_EXPR
? umin_optab
: umax_optab
)
8461 : (code
== MIN_EXPR
? smin_optab
: smax_optab
));
8463 temp
= expand_binop (mode
, this_optab
, op0
, op1
, target
, unsignedp
,
      /* At this point, a MEM target is no longer useful; we will get better
	 code without it.  */
8471 if (GET_CODE (target
) == MEM
)
8472 target
= gen_reg_rtx (mode
);
8475 emit_move_insn (target
, op0
);
8477 op0
= gen_label_rtx ();
8479 /* If this mode is an integer too wide to compare properly,
8480 compare word by word. Rely on cse to optimize constant cases. */
8481 if (GET_MODE_CLASS (mode
) == MODE_INT
8482 && ! can_compare_p (GE
, mode
, ccp_jump
))
8484 if (code
== MAX_EXPR
)
8485 do_jump_by_parts_greater_rtx (mode
, TREE_UNSIGNED (type
),
8486 target
, op1
, NULL_RTX
, op0
);
8488 do_jump_by_parts_greater_rtx (mode
, TREE_UNSIGNED (type
),
8489 op1
, target
, NULL_RTX
, op0
);
8493 int unsignedp
= TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 1)));
8494 do_compare_rtx_and_jump (target
, op1
, code
== MAX_EXPR
? GE
: LE
,
8495 unsignedp
, mode
, NULL_RTX
, NULL_RTX
,
8498 emit_move_insn (target
, op1
);
8503 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
8504 if (modifier
== EXPAND_STACK_PARM
)
8506 temp
= expand_unop (mode
, one_cmpl_optab
, op0
, target
, 1);
8512 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
8513 if (modifier
== EXPAND_STACK_PARM
)
8515 temp
= expand_unop (mode
, ffs_optab
, op0
, target
, 1);
8521 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
8522 temp
= expand_unop (mode
, clz_optab
, op0
, target
, 1);
8528 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
8529 temp
= expand_unop (mode
, ctz_optab
, op0
, target
, 1);
8535 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
8536 temp
= expand_unop (mode
, popcount_optab
, op0
, target
, 1);
8542 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
8543 temp
= expand_unop (mode
, parity_optab
, op0
, target
, 1);
8548 /* ??? Can optimize bitwise operations with one arg constant.
8549 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8550 and (a bitwise1 b) bitwise2 b (etc)
8551 but that is probably not worth while. */
8553 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8554 boolean values when we want in all cases to compute both of them. In
8555 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8556 as actual zero-or-1 values and then bitwise anding. In cases where
8557 there cannot be any side effects, better code would be made by
8558 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8559 how to recognize those cases. */
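      /* Editor's note: the distinction drawn above, in source terms.
	 TRUTH_ANDIF_EXPR is C's short-circuit `&&'; TRUTH_AND_EXPR evaluates
	 both operands to 0/1 and ands the bits, which is what the code below
	 emits.  Hedged stand-alone illustration.  */
#if 0
static int
truth_and_both (int a, int b)
{
  /* Both (a != 0) and (b != 0) are always computed, then combined with a
     plain bitwise AND -- no branch needed.  */
  return (a != 0) & (b != 0);
}
#endif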
8561 case TRUTH_AND_EXPR
:
8563 this_optab
= and_optab
;
8568 this_optab
= ior_optab
;
8571 case TRUTH_XOR_EXPR
:
8573 this_optab
= xor_optab
;
8580 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1), 1))
8582 if (modifier
== EXPAND_STACK_PARM
)
8584 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
8585 return expand_shift (code
, mode
, op0
, TREE_OPERAND (exp
, 1), target
,
8588 /* Could determine the answer when only additive constants differ. Also,
8589 the addition of one can be handled by changing the condition. */
8596 case UNORDERED_EXPR
:
8603 temp
= do_store_flag (exp
,
8604 modifier
!= EXPAND_STACK_PARM
? target
: NULL_RTX
,
8605 tmode
!= VOIDmode
? tmode
: mode
, 0);
8609 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8610 if (code
== NE_EXPR
&& integer_zerop (TREE_OPERAND (exp
, 1))
8612 && GET_CODE (original_target
) == REG
8613 && (GET_MODE (original_target
)
8614 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
8616 temp
= expand_expr (TREE_OPERAND (exp
, 0), original_target
,
8619 /* If temp is constant, we can just compute the result. */
8620 if (GET_CODE (temp
) == CONST_INT
)
8622 if (INTVAL (temp
) != 0)
8623 emit_move_insn (target
, const1_rtx
);
8625 emit_move_insn (target
, const0_rtx
);
8630 if (temp
!= original_target
)
8632 enum machine_mode mode1
= GET_MODE (temp
);
8633 if (mode1
== VOIDmode
)
8634 mode1
= tmode
!= VOIDmode
? tmode
: mode
;
8636 temp
= copy_to_mode_reg (mode1
, temp
);
8639 op1
= gen_label_rtx ();
8640 emit_cmp_and_jump_insns (temp
, const0_rtx
, EQ
, NULL_RTX
,
8641 GET_MODE (temp
), unsignedp
, op1
);
8642 emit_move_insn (temp
, const1_rtx
);
8647 /* If no set-flag instruction, must generate a conditional
8648 store into a temporary variable. Drop through
8649 and handle this like && and ||. */
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
8655 || modifier
== EXPAND_STACK_PARM
8656 || ! safe_from_p (target
, exp
, 1)
8657 /* Make sure we don't have a hard reg (such as function's return
8658 value) live across basic blocks, if not optimizing. */
8659 || (!optimize
&& GET_CODE (target
) == REG
8660 && REGNO (target
) < FIRST_PSEUDO_REGISTER
)))
8661 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
8664 emit_clr_insn (target
);
8666 op1
= gen_label_rtx ();
8667 jumpifnot (exp
, op1
);
8670 emit_0_to_1_insn (target
);
8673 return ignore
? const0_rtx
: target
;
    case TRUTH_NOT_EXPR:
8676 if (modifier
== EXPAND_STACK_PARM
)
8678 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, VOIDmode
, 0);
8679 /* The parser is careful to generate TRUTH_NOT_EXPR
8680 only with operands that are always zero or one. */
8681 temp
= expand_binop (mode
, xor_optab
, op0
, const1_rtx
,
8682 target
, 1, OPTAB_LIB_WIDEN
);
8688 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, 0);
8690 return expand_expr (TREE_OPERAND (exp
, 1),
8691 (ignore
? const0_rtx
: target
),
8692 VOIDmode
, modifier
);
8695 /* If we would have a "singleton" (see below) were it not for a
8696 conversion in each arm, bring that conversion back out. */
8697 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == NOP_EXPR
8698 && TREE_CODE (TREE_OPERAND (exp
, 2)) == NOP_EXPR
8699 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0))
8700 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 2), 0))))
8702 tree iftrue
= TREE_OPERAND (TREE_OPERAND (exp
, 1), 0);
8703 tree iffalse
= TREE_OPERAND (TREE_OPERAND (exp
, 2), 0);
8705 if ((TREE_CODE_CLASS (TREE_CODE (iftrue
)) == '2'
8706 && operand_equal_p (iffalse
, TREE_OPERAND (iftrue
, 0), 0))
8707 || (TREE_CODE_CLASS (TREE_CODE (iffalse
)) == '2'
8708 && operand_equal_p (iftrue
, TREE_OPERAND (iffalse
, 0), 0))
8709 || (TREE_CODE_CLASS (TREE_CODE (iftrue
)) == '1'
8710 && operand_equal_p (iffalse
, TREE_OPERAND (iftrue
, 0), 0))
8711 || (TREE_CODE_CLASS (TREE_CODE (iffalse
)) == '1'
8712 && operand_equal_p (iftrue
, TREE_OPERAND (iffalse
, 0), 0)))
8713 return expand_expr (build1 (NOP_EXPR
, type
,
8714 build (COND_EXPR
, TREE_TYPE (iftrue
),
8715 TREE_OPERAND (exp
, 0),
8717 target
, tmode
, modifier
);
8721 /* Note that COND_EXPRs whose type is a structure or union
8722 are required to be constructed to contain assignments of
8723 a temporary variable, so that we can evaluate them here
8724 for side effect only. If type is void, we must do likewise. */
8726 /* If an arm of the branch requires a cleanup,
8727 only that cleanup is performed. */
8730 tree binary_op
= 0, unary_op
= 0;
8732 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8733 convert it to our mode, if necessary. */
8734 if (integer_onep (TREE_OPERAND (exp
, 1))
8735 && integer_zerop (TREE_OPERAND (exp
, 2))
8736 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<')
8740 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
,
8745 if (modifier
== EXPAND_STACK_PARM
)
8747 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, mode
, modifier
);
8748 if (GET_MODE (op0
) == mode
)
8752 target
= gen_reg_rtx (mode
);
8753 convert_move (target
, op0
, unsignedp
);
8757 /* Check for X ? A + B : A. If we have this, we can copy A to the
8758 output and conditionally add B. Similarly for unary operations.
8759 Don't do this if X has side-effects because those side effects
8760 might affect A or B and the "?" operation is a sequence point in
8761 ANSI. (operand_equal_p tests for side effects.) */
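	/* Editor's note: the shape being matched above, as a hedged sketch.
	   When one arm is the other arm plus something, A can be stored
	   unconditionally and only the addition guarded, which is what the
	   singleton / binary_op bookkeeping below arranges.  */
#if 0
static int
conditional_add (int x, int a, int b)
{
  int result = a;		/* copy A to the output unconditionally */
  if (x)
    result = result + b;	/* conditionally apply the binary op */
  return result;		/* computes x ? a + b : a */
}
#endif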
8763 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 1))) == '2'
8764 && operand_equal_p (TREE_OPERAND (exp
, 2),
8765 TREE_OPERAND (TREE_OPERAND (exp
, 1), 0), 0))
8766 singleton
= TREE_OPERAND (exp
, 2), binary_op
= TREE_OPERAND (exp
, 1);
8767 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 2))) == '2'
8768 && operand_equal_p (TREE_OPERAND (exp
, 1),
8769 TREE_OPERAND (TREE_OPERAND (exp
, 2), 0), 0))
8770 singleton
= TREE_OPERAND (exp
, 1), binary_op
= TREE_OPERAND (exp
, 2);
8771 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 1))) == '1'
8772 && operand_equal_p (TREE_OPERAND (exp
, 2),
8773 TREE_OPERAND (TREE_OPERAND (exp
, 1), 0), 0))
8774 singleton
= TREE_OPERAND (exp
, 2), unary_op
= TREE_OPERAND (exp
, 1);
8775 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 2))) == '1'
8776 && operand_equal_p (TREE_OPERAND (exp
, 1),
8777 TREE_OPERAND (TREE_OPERAND (exp
, 2), 0), 0))
8778 singleton
= TREE_OPERAND (exp
, 1), unary_op
= TREE_OPERAND (exp
, 2);
	/* If we are not to produce a result, we have no target.  Otherwise,
	   if a target was specified use it; it will not be used as an
	   intermediate target unless it is safe.  If no target, use a
	   temporary.  */
8787 else if (modifier
== EXPAND_STACK_PARM
)
8788 temp
= assign_temp (type
, 0, 0, 1);
8789 else if (original_target
8790 && (safe_from_p (original_target
, TREE_OPERAND (exp
, 0), 1)
8791 || (singleton
&& GET_CODE (original_target
) == REG
8792 && REGNO (original_target
) >= FIRST_PSEUDO_REGISTER
8793 && original_target
== var_rtx (singleton
)))
8794 && GET_MODE (original_target
) == mode
8795 #ifdef HAVE_conditional_move
8796 && (! can_conditionally_move_p (mode
)
8797 || GET_CODE (original_target
) == REG
8798 || TREE_ADDRESSABLE (type
))
8800 && (GET_CODE (original_target
) != MEM
8801 || TREE_ADDRESSABLE (type
)))
8802 temp
= original_target
;
8803 else if (TREE_ADDRESSABLE (type
))
8806 temp
= assign_temp (type
, 0, 0, 1);
8808 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8809 do the test of X as a store-flag operation, do this as
8810 A + ((X != 0) << log C). Similarly for other simple binary
8811 operators. Only do for C == 1 if BRANCH_COST is low. */
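	/* Editor's note: a worked instance of the store-flag form above,
	   given as a hedged stand-alone sketch.  For X ? A + 4 : A the
	   result is A + ((X != 0) << 2): the comparison result 0/1 is
	   shifted into place and added, with no branch at all.  */
#if 0
static int
branchless_conditional_add (int x, int a)
{
  /* C == 4 is a power of 2, so log2 (C) == 2.  */
  return a + ((x != 0) << 2);
}
#endif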
8812 if (temp
&& singleton
&& binary_op
8813 && (TREE_CODE (binary_op
) == PLUS_EXPR
8814 || TREE_CODE (binary_op
) == MINUS_EXPR
8815 || TREE_CODE (binary_op
) == BIT_IOR_EXPR
8816 || TREE_CODE (binary_op
) == BIT_XOR_EXPR
)
8817 && (BRANCH_COST
>= 3 ? integer_pow2p (TREE_OPERAND (binary_op
, 1))
8818 : integer_onep (TREE_OPERAND (binary_op
, 1)))
8819 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<')
8823 optab boptab
= (TREE_CODE (binary_op
) == PLUS_EXPR
8824 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op
))
8825 ? addv_optab
: add_optab
)
8826 : TREE_CODE (binary_op
) == MINUS_EXPR
8827 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op
))
8828 ? subv_optab
: sub_optab
)
8829 : TREE_CODE (binary_op
) == BIT_IOR_EXPR
? ior_optab
8832 /* If we had X ? A : A + 1, do this as A + (X == 0). */
8833 if (singleton
== TREE_OPERAND (exp
, 1))
8834 cond
= invert_truthvalue (TREE_OPERAND (exp
, 0));
8836 cond
= TREE_OPERAND (exp
, 0);
8838 result
= do_store_flag (cond
, (safe_from_p (temp
, singleton
, 1)
8840 mode
, BRANCH_COST
<= 1);
8842 if (result
!= 0 && ! integer_onep (TREE_OPERAND (binary_op
, 1)))
8843 result
= expand_shift (LSHIFT_EXPR
, mode
, result
,
8844 build_int_2 (tree_log2
8848 (safe_from_p (temp
, singleton
, 1)
8849 ? temp
: NULL_RTX
), 0);
8853 op1
= expand_expr (singleton
, NULL_RTX
, VOIDmode
, 0);
8854 return expand_binop (mode
, boptab
, op1
, result
, temp
,
8855 unsignedp
, OPTAB_LIB_WIDEN
);
8859 do_pending_stack_adjust ();
8861 op0
= gen_label_rtx ();
8863 if (singleton
&& ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0)))
8867 /* If the target conflicts with the other operand of the
8868 binary op, we can't use it. Also, we can't use the target
8869 if it is a hard register, because evaluating the condition
8870 might clobber it. */
8872 && ! safe_from_p (temp
, TREE_OPERAND (binary_op
, 1), 1))
8873 || (GET_CODE (temp
) == REG
8874 && REGNO (temp
) < FIRST_PSEUDO_REGISTER
))
8875 temp
= gen_reg_rtx (mode
);
8876 store_expr (singleton
, temp
,
8877 modifier
== EXPAND_STACK_PARM
? 2 : 0);
8880 expand_expr (singleton
,
8881 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8882 if (singleton
== TREE_OPERAND (exp
, 1))
8883 jumpif (TREE_OPERAND (exp
, 0), op0
);
8885 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
8887 start_cleanup_deferral ();
8888 if (binary_op
&& temp
== 0)
8889 /* Just touch the other operand. */
8890 expand_expr (TREE_OPERAND (binary_op
, 1),
8891 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8893 store_expr (build (TREE_CODE (binary_op
), type
,
8894 make_tree (type
, temp
),
8895 TREE_OPERAND (binary_op
, 1)),
8896 temp
, modifier
== EXPAND_STACK_PARM
? 2 : 0);
8898 store_expr (build1 (TREE_CODE (unary_op
), type
,
8899 make_tree (type
, temp
)),
8900 temp
, modifier
== EXPAND_STACK_PARM
? 2 : 0);
8903 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8904 comparison operator. If we have one of these cases, set the
8905 output to A, branch on A (cse will merge these two references),
8906 then set the output to FOO. */
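	/* Editor's note: the pattern above in source form, as a hedged
	   sketch.  Because the value selected in one arm is the value
	   already being tested, it can be stored first, and the branch only
	   has to guard the store of FOO.  */
#if 0
static int
select_tested_value (int a, int foo)
{
  int result = a;		/* set the output to A ...  */
  if (!(a > 0))			/* ... branch on the same A ...  */
    result = foo;		/* ... then set the output to FOO */
  return result;		/* computes a > 0 ? a : foo */
}
#endif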
8908 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<'
8909 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp
, 0), 1))
8910 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
8911 TREE_OPERAND (exp
, 1), 0)
8912 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0))
8913 || TREE_CODE (TREE_OPERAND (exp
, 1)) == SAVE_EXPR
)
8914 && safe_from_p (temp
, TREE_OPERAND (exp
, 2), 1))
8916 if (GET_CODE (temp
) == REG
8917 && REGNO (temp
) < FIRST_PSEUDO_REGISTER
)
8918 temp
= gen_reg_rtx (mode
);
8919 store_expr (TREE_OPERAND (exp
, 1), temp
,
8920 modifier
== EXPAND_STACK_PARM
? 2 : 0);
8921 jumpif (TREE_OPERAND (exp
, 0), op0
);
8923 start_cleanup_deferral ();
8924 store_expr (TREE_OPERAND (exp
, 2), temp
,
8925 modifier
== EXPAND_STACK_PARM
? 2 : 0);
8929 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<'
8930 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp
, 0), 1))
8931 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
8932 TREE_OPERAND (exp
, 2), 0)
8933 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0))
8934 || TREE_CODE (TREE_OPERAND (exp
, 2)) == SAVE_EXPR
)
8935 && safe_from_p (temp
, TREE_OPERAND (exp
, 1), 1))
8937 if (GET_CODE (temp
) == REG
8938 && REGNO (temp
) < FIRST_PSEUDO_REGISTER
)
8939 temp
= gen_reg_rtx (mode
);
8940 store_expr (TREE_OPERAND (exp
, 2), temp
,
8941 modifier
== EXPAND_STACK_PARM
? 2 : 0);
8942 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
8944 start_cleanup_deferral ();
8945 store_expr (TREE_OPERAND (exp
, 1), temp
,
8946 modifier
== EXPAND_STACK_PARM
? 2 : 0);
8951 op1
= gen_label_rtx ();
8952 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
8954 start_cleanup_deferral ();
8956 /* One branch of the cond can be void, if it never returns. For
8957 example A ? throw : E */
8959 && TREE_TYPE (TREE_OPERAND (exp
, 1)) != void_type_node
)
8960 store_expr (TREE_OPERAND (exp
, 1), temp
,
8961 modifier
== EXPAND_STACK_PARM
? 2 : 0);
8963 expand_expr (TREE_OPERAND (exp
, 1),
8964 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8965 end_cleanup_deferral ();
8967 emit_jump_insn (gen_jump (op1
));
8970 start_cleanup_deferral ();
8972 && TREE_TYPE (TREE_OPERAND (exp
, 2)) != void_type_node
)
8973 store_expr (TREE_OPERAND (exp
, 2), temp
,
8974 modifier
== EXPAND_STACK_PARM
? 2 : 0);
8976 expand_expr (TREE_OPERAND (exp
, 2),
8977 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8980 end_cleanup_deferral ();
8991 /* Something needs to be initialized, but we didn't know
8992 where that thing was when building the tree. For example,
8993 it could be the return value of a function, or a parameter
8994 to a function which lays down in the stack, or a temporary
8995 variable which must be passed by reference.
8997 We guarantee that the expression will either be constructed
8998 or copied into our original target. */
9000 tree slot
= TREE_OPERAND (exp
, 0);
9001 tree cleanups
= NULL_TREE
;
9004 if (TREE_CODE (slot
) != VAR_DECL
)
9008 target
= original_target
;
9010 /* Set this here so that if we get a target that refers to a
9011 register variable that's already been used, put_reg_into_stack
9012 knows that it should fix up those uses. */
9013 TREE_USED (slot
) = 1;
9017 if (DECL_RTL_SET_P (slot
))
9019 target
= DECL_RTL (slot
);
	      /* If we have already expanded the slot, don't do
		 it again.  */
9022 if (TREE_OPERAND (exp
, 1) == NULL_TREE
)
9027 target
= assign_temp (type
, 2, 0, 1);
9028 /* All temp slots at this level must not conflict. */
9029 preserve_temp_slots (target
);
9030 SET_DECL_RTL (slot
, target
);
9031 if (TREE_ADDRESSABLE (slot
))
9032 put_var_into_stack (slot
, /*rescan=*/false);
9034 /* Since SLOT is not known to the called function
9035 to belong to its stack frame, we must build an explicit
9036 cleanup. This case occurs when we must build up a reference
9037 to pass the reference as an argument. In this case,
		 it is very likely that such a reference need not be
		 built here.  */
9041 if (TREE_OPERAND (exp
, 2) == 0)
9042 TREE_OPERAND (exp
, 2)
9043 = (*lang_hooks
.maybe_build_cleanup
) (slot
);
9044 cleanups
= TREE_OPERAND (exp
, 2);
9049 /* This case does occur, when expanding a parameter which
9050 needs to be constructed on the stack. The target
9051 is the actual stack address that we want to initialize.
9052 The function we call will perform the cleanup in this case. */
9054 /* If we have already assigned it space, use that space,
9055 not target that we were passed in, as our target
9056 parameter is only a hint. */
9057 if (DECL_RTL_SET_P (slot
))
9059 target
= DECL_RTL (slot
);
	      /* If we have already expanded the slot, don't do
		 it again.  */
9062 if (TREE_OPERAND (exp
, 1) == NULL_TREE
)
9067 SET_DECL_RTL (slot
, target
);
9068 /* If we must have an addressable slot, then make sure that
9069 the RTL that we just stored in slot is OK. */
9070 if (TREE_ADDRESSABLE (slot
))
9071 put_var_into_stack (slot
, /*rescan=*/true);
9075 exp1
= TREE_OPERAND (exp
, 3) = TREE_OPERAND (exp
, 1);
9076 /* Mark it as expanded. */
9077 TREE_OPERAND (exp
, 1) = NULL_TREE
;
9079 store_expr (exp1
, target
, modifier
== EXPAND_STACK_PARM
? 2 : 0);
9081 expand_decl_cleanup_eh (NULL_TREE
, cleanups
, CLEANUP_EH_ONLY (exp
));
9088 tree lhs
= TREE_OPERAND (exp
, 0);
9089 tree rhs
= TREE_OPERAND (exp
, 1);
9091 temp
= expand_assignment (lhs
, rhs
, ! ignore
);
9097 /* If lhs is complex, expand calls in rhs before computing it.
9098 That's so we don't compute a pointer and save it over a
9099 call. If lhs is simple, compute it first so we can give it
9100 as a target if the rhs is just a call. This avoids an
9101 extra temp and copy and that prevents a partial-subsumption
9102 which makes bad code. Actually we could treat
9103 component_ref's of vars like vars. */
9105 tree lhs
= TREE_OPERAND (exp
, 0);
9106 tree rhs
= TREE_OPERAND (exp
, 1);
9110 /* Check for |= or &= of a bitfield of size one into another bitfield
9111 of size 1. In this case, (unless we need the result of the
9112 assignment) we can do this more efficiently with a
9113 test followed by an assignment, if necessary.
	 ??? At this point, we can't get a BIT_FIELD_REF here.  But if
	 things change so we do, this code should be enhanced to
	 support it.  */
9119 && TREE_CODE (lhs
) == COMPONENT_REF
9120 && (TREE_CODE (rhs
) == BIT_IOR_EXPR
9121 || TREE_CODE (rhs
) == BIT_AND_EXPR
)
9122 && TREE_OPERAND (rhs
, 0) == lhs
9123 && TREE_CODE (TREE_OPERAND (rhs
, 1)) == COMPONENT_REF
9124 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs
, 1)))
9125 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs
, 1), 1))))
9127 rtx label
= gen_label_rtx ();
9129 do_jump (TREE_OPERAND (rhs
, 1),
9130 TREE_CODE (rhs
) == BIT_IOR_EXPR
? label
: 0,
9131 TREE_CODE (rhs
) == BIT_AND_EXPR
? label
: 0);
9132 expand_assignment (lhs
, convert (TREE_TYPE (rhs
),
9133 (TREE_CODE (rhs
) == BIT_IOR_EXPR
9135 : integer_zero_node
)),
9137 do_pending_stack_adjust ();
9142 temp
= expand_assignment (lhs
, rhs
, ! ignore
);
9148 if (!TREE_OPERAND (exp
, 0))
9149 expand_null_return ();
9151 expand_return (TREE_OPERAND (exp
, 0));
    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
      return expand_increment (exp, 0, ignore);

    case POSTINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
      /* Faster to treat as pre-increment if result is not used.  */
      return expand_increment (exp, ! ignore, ignore);
9164 if (modifier
== EXPAND_STACK_PARM
)
9166 /* Are we taking the address of a nested function? */
9167 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == FUNCTION_DECL
9168 && decl_function_context (TREE_OPERAND (exp
, 0)) != 0
9169 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp
, 0))
9170 && ! TREE_STATIC (exp
))
9172 op0
= trampoline_address (TREE_OPERAND (exp
, 0));
9173 op0
= force_operand (op0
, target
);
      /* If we are taking the address of something erroneous, just
	 return a zero.  */
9177 else if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ERROR_MARK
)
9179 /* If we are taking the address of a constant and are at the
9180 top level, we have to use output_constant_def since we can't
9181 call force_const_mem at top level. */
9183 && (TREE_CODE (TREE_OPERAND (exp
, 0)) == CONSTRUCTOR
9184 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0)))
9186 op0
= XEXP (output_constant_def (TREE_OPERAND (exp
, 0), 0), 0);
9189 /* We make sure to pass const0_rtx down if we came in with
9190 ignore set, to avoid doing the cleanups twice for something. */
9191 op0
= expand_expr (TREE_OPERAND (exp
, 0),
9192 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
,
9193 (modifier
== EXPAND_INITIALIZER
9194 ? modifier
: EXPAND_CONST_ADDRESS
));
9196 /* If we are going to ignore the result, OP0 will have been set
9197 to const0_rtx, so just return it. Don't get confused and
9198 think we are taking the address of the constant. */
9202 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
9203 clever and returns a REG when given a MEM. */
9204 op0
= protect_from_queue (op0
, 1);
9206 /* We would like the object in memory. If it is a constant, we can
9207 have it be statically allocated into memory. For a non-constant,
9208 we need to allocate some memory and store the value into it. */
9210 if (CONSTANT_P (op0
))
9211 op0
= force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))),
9213 else if (GET_CODE (op0
) == REG
|| GET_CODE (op0
) == SUBREG
9214 || GET_CODE (op0
) == CONCAT
|| GET_CODE (op0
) == ADDRESSOF
9215 || GET_CODE (op0
) == PARALLEL
|| GET_CODE (op0
) == LO_SUM
)
9217 /* If the operand is a SAVE_EXPR, we can deal with this by
9218 forcing the SAVE_EXPR into memory. */
9219 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == SAVE_EXPR
)
9221 put_var_into_stack (TREE_OPERAND (exp
, 0),
9223 op0
= SAVE_EXPR_RTL (TREE_OPERAND (exp
, 0));
9227 /* If this object is in a register, it can't be BLKmode. */
9228 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
9229 rtx memloc
= assign_temp (inner_type
, 1, 1, 1);
9231 if (GET_CODE (op0
) == PARALLEL
)
	    /* Handle calls that pass values in multiple
	       non-contiguous locations.  The Irix 6 ABI has examples
	       of this.  */
9235 emit_group_store (memloc
, op0
, inner_type
,
9236 int_size_in_bytes (inner_type
));
9238 emit_move_insn (memloc
, op0
);
9244 if (GET_CODE (op0
) != MEM
)
9247 mark_temp_addr_taken (op0
);
9248 if (modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
9250 op0
= XEXP (op0
, 0);
9251 #ifdef POINTERS_EXTEND_UNSIGNED
9252 if (GET_MODE (op0
) == Pmode
&& GET_MODE (op0
) != mode
9253 && mode
== ptr_mode
)
9254 op0
= convert_memory_address (ptr_mode
, op0
);
      /* If OP0 is not aligned at least as much as the type requires, we
	 need to make a temporary, copy OP0 to it, and take the address of
	 the temporary.  We want to use the alignment of the type, not of
	 the operand.  Note that this is incorrect for FUNCTION_TYPE, but
	 the test for BLKmode means that can't happen.  The test for
	 BLKmode is because we never make mis-aligned MEMs with
	 non-BLKmode.

	 We don't need to do this at all if the machine doesn't have
	 strict alignment.  */
9269 if (STRICT_ALIGNMENT
&& GET_MODE (op0
) == BLKmode
9270 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp
, 0)))
9272 && MEM_ALIGN (op0
) < BIGGEST_ALIGNMENT
)
9274 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
9277 if (TYPE_ALIGN_OK (inner_type
))
9280 if (TREE_ADDRESSABLE (inner_type
))
9282 /* We can't make a bitwise copy of this object, so fail. */
9283 error ("cannot take the address of an unaligned member");
9287 new = assign_stack_temp_for_type
9288 (TYPE_MODE (inner_type
),
9289 MEM_SIZE (op0
) ? INTVAL (MEM_SIZE (op0
))
9290 : int_size_in_bytes (inner_type
),
9291 1, build_qualified_type (inner_type
,
9292 (TYPE_QUALS (inner_type
)
9293 | TYPE_QUAL_CONST
)));
9295 emit_block_move (new, op0
, expr_size (TREE_OPERAND (exp
, 0)),
9296 (modifier
== EXPAND_STACK_PARM
9297 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
9302 op0
= force_operand (XEXP (op0
, 0), target
);
9306 && GET_CODE (op0
) != REG
9307 && modifier
!= EXPAND_CONST_ADDRESS
9308 && modifier
!= EXPAND_INITIALIZER
9309 && modifier
!= EXPAND_SUM
)
9310 op0
= force_reg (Pmode
, op0
);
9312 if (GET_CODE (op0
) == REG
9313 && ! REG_USERVAR_P (op0
))
9314 mark_reg_pointer (op0
, TYPE_ALIGN (TREE_TYPE (type
)));
9316 #ifdef POINTERS_EXTEND_UNSIGNED
9317 if (GET_MODE (op0
) == Pmode
&& GET_MODE (op0
) != mode
9318 && mode
== ptr_mode
)
9319 op0
= convert_memory_address (ptr_mode
, op0
);
    case ENTRY_VALUE_EXPR:
9327 /* COMPLEX type for Extended Pascal & Fortran */
9330 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (TREE_TYPE (exp
)));
9333 /* Get the rtx code of the operands. */
9334 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
9335 op1
= expand_expr (TREE_OPERAND (exp
, 1), 0, VOIDmode
, 0);
9338 target
= gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp
)));
9342 /* Move the real (op0) and imaginary (op1) parts to their location. */
9343 emit_move_insn (gen_realpart (mode
, target
), op0
);
9344 emit_move_insn (gen_imagpart (mode
, target
), op1
);
9346 insns
= get_insns ();
9349 /* Complex construction should appear as a single unit. */
9350 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
9351 each with a separate pseudo as destination.
9352 It's not correct for flow to treat them as a unit. */
9353 if (GET_CODE (target
) != CONCAT
)
9354 emit_no_conflict_block (insns
, target
, op0
, op1
, NULL_RTX
);
9362 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
9363 return gen_realpart (mode
, op0
);
9366 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
9367 return gen_imagpart (mode
, op0
);
9371 enum machine_mode partmode
= TYPE_MODE (TREE_TYPE (TREE_TYPE (exp
)));
9375 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
9378 target
= gen_reg_rtx (mode
);
9382 /* Store the realpart and the negated imagpart to target. */
9383 emit_move_insn (gen_realpart (partmode
, target
),
9384 gen_realpart (partmode
, op0
));
9386 imag_t
= gen_imagpart (partmode
, target
);
9387 temp
= expand_unop (partmode
,
9388 ! unsignedp
&& flag_trapv
9389 && (GET_MODE_CLASS(partmode
) == MODE_INT
)
9390 ? negv_optab
: neg_optab
,
9391 gen_imagpart (partmode
, op0
), imag_t
, 0);
9393 emit_move_insn (imag_t
, temp
);
9395 insns
= get_insns ();
9398 /* Conjugate should appear as a single unit
9399 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
9400 each with a separate pseudo as destination.
9401 It's not correct for flow to treat them as a unit. */
9402 if (GET_CODE (target
) != CONCAT
)
9403 emit_no_conflict_block (insns
, target
, op0
, NULL_RTX
, NULL_RTX
);
    case TRY_CATCH_EXPR:
9412 tree handler
= TREE_OPERAND (exp
, 1);
9414 expand_eh_region_start ();
9416 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
9418 expand_eh_region_end_cleanup (handler
);
    case TRY_FINALLY_EXPR:
9425 tree try_block
= TREE_OPERAND (exp
, 0);
9426 tree finally_block
= TREE_OPERAND (exp
, 1);
9428 if (!optimize
|| unsafe_for_reeval (finally_block
) > 1)
9430 /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
9431 is not sufficient, so we cannot expand the block twice.
9432 So we play games with GOTO_SUBROUTINE_EXPR to let us
9433 expand the thing only once. */
9434 /* When not optimizing, we go ahead with this form since
9435 (1) user breakpoints operate more predictably without
9436 code duplication, and
9437 (2) we're not running any of the global optimizers
9438 that would explode in time/space with the highly
9439 connected CFG created by the indirect branching. */
9441 rtx finally_label
= gen_label_rtx ();
9442 rtx done_label
= gen_label_rtx ();
9443 rtx return_link
= gen_reg_rtx (Pmode
);
9444 tree cleanup
= build (GOTO_SUBROUTINE_EXPR
, void_type_node
,
9445 (tree
) finally_label
, (tree
) return_link
);
9446 TREE_SIDE_EFFECTS (cleanup
) = 1;
9448 /* Start a new binding layer that will keep track of all cleanup
9449 actions to be performed. */
9450 expand_start_bindings (2);
9451 target_temp_slot_level
= temp_slot_level
;
9453 expand_decl_cleanup (NULL_TREE
, cleanup
);
9454 op0
= expand_expr (try_block
, target
, tmode
, modifier
);
9456 preserve_temp_slots (op0
);
9457 expand_end_bindings (NULL_TREE
, 0, 0);
9458 emit_jump (done_label
);
9459 emit_label (finally_label
);
9460 expand_expr (finally_block
, const0_rtx
, VOIDmode
, 0);
9461 emit_indirect_jump (return_link
);
9462 emit_label (done_label
);
9466 expand_start_bindings (2);
9467 target_temp_slot_level
= temp_slot_level
;
9469 expand_decl_cleanup (NULL_TREE
, finally_block
);
9470 op0
= expand_expr (try_block
, target
, tmode
, modifier
);
9472 preserve_temp_slots (op0
);
9473 expand_end_bindings (NULL_TREE
, 0, 0);
    case GOTO_SUBROUTINE_EXPR:
9481 rtx subr
= (rtx
) TREE_OPERAND (exp
, 0);
9482 rtx return_link
= *(rtx
*) &TREE_OPERAND (exp
, 1);
9483 rtx return_address
= gen_label_rtx ();
9484 emit_move_insn (return_link
,
9485 gen_rtx_LABEL_REF (Pmode
, return_address
));
9487 emit_label (return_address
);
9492 return expand_builtin_va_arg (TREE_OPERAND (exp
, 0), type
);
9495 return get_exception_pointer (cfun
);
      /* Function descriptors are not valid except as
	 initialization constants, and should not be expanded.  */
9503 return (*lang_hooks
.expand_expr
) (exp
, original_target
, tmode
, modifier
);
9506 /* Here to do an ordinary binary operator, generating an instruction
9507 from the optab already placed in `this_optab'. */
9509 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1), 1))
9511 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
9512 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
9514 if (modifier
== EXPAND_STACK_PARM
)
9516 temp
= expand_binop (mode
, this_optab
, op0
, op1
, target
,
9517 unsignedp
, OPTAB_LIB_WIDEN
);
9523 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9524 when applied to the address of EXP produces an address known to be
9525 aligned more than BIGGEST_ALIGNMENT. */
9528 is_aligning_offset (tree offset
, tree exp
)
9530 /* Strip off any conversions and WITH_RECORD_EXPR nodes. */
9531 while (TREE_CODE (offset
) == NON_LVALUE_EXPR
9532 || TREE_CODE (offset
) == NOP_EXPR
9533 || TREE_CODE (offset
) == CONVERT_EXPR
9534 || TREE_CODE (offset
) == WITH_RECORD_EXPR
)
9535 offset
= TREE_OPERAND (offset
, 0);
  /* We must now have a BIT_AND_EXPR with a constant that is one less than
     a power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
9539 if (TREE_CODE (offset
) != BIT_AND_EXPR
9540 || !host_integerp (TREE_OPERAND (offset
, 1), 1)
9541 || compare_tree_int (TREE_OPERAND (offset
, 1), BIGGEST_ALIGNMENT
) <= 0
9542 || !exact_log2 (tree_low_cst (TREE_OPERAND (offset
, 1), 1) + 1) < 0)
9545 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9546 It must be NEGATE_EXPR. Then strip any more conversions. */
9547 offset
= TREE_OPERAND (offset
, 0);
9548 while (TREE_CODE (offset
) == NON_LVALUE_EXPR
9549 || TREE_CODE (offset
) == NOP_EXPR
9550 || TREE_CODE (offset
) == CONVERT_EXPR
)
9551 offset
= TREE_OPERAND (offset
, 0);
9553 if (TREE_CODE (offset
) != NEGATE_EXPR
)
9556 offset
= TREE_OPERAND (offset
, 0);
9557 while (TREE_CODE (offset
) == NON_LVALUE_EXPR
9558 || TREE_CODE (offset
) == NOP_EXPR
9559 || TREE_CODE (offset
) == CONVERT_EXPR
)
9560 offset
= TREE_OPERAND (offset
, 0);
9562 /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
9563 whose type is the same as EXP. */
9564 return (TREE_CODE (offset
) == ADDR_EXPR
9565 && (TREE_OPERAND (offset
, 0) == exp
9566 || (TREE_CODE (TREE_OPERAND (offset
, 0)) == PLACEHOLDER_EXPR
9567 && (TREE_TYPE (TREE_OPERAND (offset
, 0))
9568 == TREE_TYPE (exp
)))));
9571 /* Return the tree node if an ARG corresponds to a string constant or zero
9572 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9573 in bytes within the string that ARG is accessing. The type of the
9574 offset will be `sizetype'. */
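/* Editor's note: examples of the trees the function below recognizes, given
   as the C expressions they come from.  A plain string literal is an
   ADDR_EXPR of a STRING_CST with offset 0; "hello" + 2 is the PLUS_EXPR
   form, where *PTR_OFFSET becomes 2.  Hedged illustration, not part of the
   original file.  */
#if 0
static const char *
string_constant_examples (void)
{
  const char *p = "hello";	/* ADDR_EXPR of a STRING_CST, offset 0 */
  const char *q = "hello" + 2;	/* PLUS_EXPR form, offset 2 -> "llo" */
  return q ? q : p;
}
#endif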
9577 string_constant (tree arg
, tree
*ptr_offset
)
9581 if (TREE_CODE (arg
) == ADDR_EXPR
9582 && TREE_CODE (TREE_OPERAND (arg
, 0)) == STRING_CST
)
9584 *ptr_offset
= size_zero_node
;
9585 return TREE_OPERAND (arg
, 0);
9587 else if (TREE_CODE (arg
) == PLUS_EXPR
)
9589 tree arg0
= TREE_OPERAND (arg
, 0);
9590 tree arg1
= TREE_OPERAND (arg
, 1);
9595 if (TREE_CODE (arg0
) == ADDR_EXPR
9596 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == STRING_CST
)
9598 *ptr_offset
= convert (sizetype
, arg1
);
9599 return TREE_OPERAND (arg0
, 0);
9601 else if (TREE_CODE (arg1
) == ADDR_EXPR
9602 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == STRING_CST
)
9604 *ptr_offset
= convert (sizetype
, arg0
);
9605 return TREE_OPERAND (arg1
, 0);
9612 /* Expand code for a post- or pre- increment or decrement
9613 and return the RTX for the result.
9614 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9617 expand_increment (tree exp
, int post
, int ignore
)
9621 tree incremented
= TREE_OPERAND (exp
, 0);
9622 optab this_optab
= add_optab
;
9624 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (exp
));
9625 int op0_is_copy
= 0;
9626 int single_insn
= 0;
9627 /* 1 means we can't store into OP0 directly,
9628 because it is a subreg narrower than a word,
9629 and we don't dare clobber the rest of the word. */
9632 /* Stabilize any component ref that might need to be
9633 evaluated more than once below. */
9635 || TREE_CODE (incremented
) == BIT_FIELD_REF
9636 || (TREE_CODE (incremented
) == COMPONENT_REF
9637 && (TREE_CODE (TREE_OPERAND (incremented
, 0)) != INDIRECT_REF
9638 || DECL_BIT_FIELD (TREE_OPERAND (incremented
, 1)))))
9639 incremented
= stabilize_reference (incremented
);
9640 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9641 ones into save exprs so that they don't accidentally get evaluated
9642 more than once by the code below. */
9643 if (TREE_CODE (incremented
) == PREINCREMENT_EXPR
9644 || TREE_CODE (incremented
) == PREDECREMENT_EXPR
)
9645 incremented
= save_expr (incremented
);
9647 /* Compute the operands as RTX.
9648 Note whether OP0 is the actual lvalue or a copy of it:
9649 I believe it is a copy iff it is a register or subreg
9650 and insns were generated in computing it. */
9652 temp
= get_last_insn ();
9653 op0
= expand_expr (incremented
, NULL_RTX
, VOIDmode
, 0);
  /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
     in place but instead must do sign- or zero-extension during assignment,
     so we copy it into a new register and let the code below use it as
     a copy.

     Note that we can safely modify this SUBREG since it is known not to be
     shared (it was made by the expand_expr call above).  */
9663 if (GET_CODE (op0
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (op0
))
9666 SUBREG_REG (op0
) = copy_to_reg (SUBREG_REG (op0
));
9670 else if (GET_CODE (op0
) == SUBREG
9671 && GET_MODE_BITSIZE (GET_MODE (op0
)) < BITS_PER_WORD
)
9673 /* We cannot increment this SUBREG in place. If we are
9674 post-incrementing, get a copy of the old value. Otherwise,
9675 just mark that we cannot increment in place. */
9677 op0
= copy_to_reg (op0
);
9682 op0_is_copy
= ((GET_CODE (op0
) == SUBREG
|| GET_CODE (op0
) == REG
)
9683 && temp
!= get_last_insn ());
9684 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
9686 /* Decide whether incrementing or decrementing. */
9687 if (TREE_CODE (exp
) == POSTDECREMENT_EXPR
9688 || TREE_CODE (exp
) == PREDECREMENT_EXPR
)
9689 this_optab
= sub_optab
;
9691 /* Convert decrement by a constant into a negative increment. */
9692 if (this_optab
== sub_optab
9693 && GET_CODE (op1
) == CONST_INT
)
9695 op1
= GEN_INT (-INTVAL (op1
));
9696 this_optab
= add_optab
;
9699 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp
)))
9700 this_optab
= this_optab
== add_optab
? addv_optab
: subv_optab
;
9702 /* For a preincrement, see if we can do this with a single instruction. */
9705 icode
= (int) this_optab
->handlers
[(int) mode
].insn_code
;
9706 if (icode
!= (int) CODE_FOR_nothing
9707 /* Make sure that OP0 is valid for operands 0 and 1
9708 of the insn we want to queue. */
9709 && (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode
)
9710 && (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode
)
9711 && (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode
))
9715 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9716 then we cannot just increment OP0. We must therefore contrive to
9717 increment the original value. Then, for postincrement, we can return
9718 OP0 since it is a copy of the old value. For preincrement, expand here
9719 unless we can do it with a single insn.
9721 Likewise if storing directly into OP0 would clobber high bits
9722 we need to preserve (bad_subreg). */
9723 if (op0_is_copy
|| (!post
&& !single_insn
) || bad_subreg
)
9725 /* This is the easiest way to increment the value wherever it is.
9726 Problems with multiple evaluation of INCREMENTED are prevented
9727 because either (1) it is a component_ref or preincrement,
9728 in which case it was stabilized above, or (2) it is an array_ref
9729 with constant index in an array in a register, which is
9730 safe to reevaluate. */
9731 tree newexp
= build (((TREE_CODE (exp
) == POSTDECREMENT_EXPR
9732 || TREE_CODE (exp
) == PREDECREMENT_EXPR
)
9733 ? MINUS_EXPR
: PLUS_EXPR
),
9736 TREE_OPERAND (exp
, 1));
9738 while (TREE_CODE (incremented
) == NOP_EXPR
9739 || TREE_CODE (incremented
) == CONVERT_EXPR
)
9741 newexp
= convert (TREE_TYPE (incremented
), newexp
);
9742 incremented
= TREE_OPERAND (incremented
, 0);
9745 temp
= expand_assignment (incremented
, newexp
, ! post
&& ! ignore
);
9746 return post
? op0
: temp
;
9751 /* We have a true reference to the value in OP0.
9752 If there is an insn to add or subtract in this mode, queue it.
9753 Queueing the increment insn avoids the register shuffling
9754 that often results if we must increment now and first save
9755 the old value for subsequent use. */
9757 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9758 op0
= stabilize (op0
);
9761 icode
= (int) this_optab
->handlers
[(int) mode
].insn_code
;
9762 if (icode
!= (int) CODE_FOR_nothing
9763 /* Make sure that OP0 is valid for operands 0 and 1
9764 of the insn we want to queue. */
9765 && (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode
)
9766 && (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode
))
9768 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode
))
9769 op1
= force_reg (mode
, op1
);
9771 return enqueue_insn (op0
, GEN_FCN (icode
) (op0
, op0
, op1
));
9773 if (icode
!= (int) CODE_FOR_nothing
&& GET_CODE (op0
) == MEM
)
9775 rtx addr
= (general_operand (XEXP (op0
, 0), mode
)
9776 ? force_reg (Pmode
, XEXP (op0
, 0))
9777 : copy_to_reg (XEXP (op0
, 0)));
9780 op0
= replace_equiv_address (op0
, addr
);
9781 temp
= force_reg (GET_MODE (op0
), op0
);
9782 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode
))
9783 op1
= force_reg (mode
, op1
);
9785 /* The increment queue is LIFO, thus we have to `queue'
9786 the instructions in reverse order. */
9787 enqueue_insn (op0
, gen_move_insn (op0
, temp
));
9788 result
= enqueue_insn (temp
, GEN_FCN (icode
) (temp
, temp
, op1
));
9793 /* Preincrement, or we can't increment with one simple insn. */
9795 /* Save a copy of the value before inc or dec, to return it later. */
9796 temp
= value
= copy_to_reg (op0
);
9798 /* Arrange to return the incremented value. */
9799 /* Copy the rtx because expand_binop will protect from the queue,
9800 and the results of that would be invalid for us to return
9801 if our caller does emit_queue before using our result. */
9802 temp
= copy_rtx (value
= op0
);
9804 /* Increment however we can. */
9805 op1
= expand_binop (mode
, this_optab
, value
, op1
, op0
,
9806 TREE_UNSIGNED (TREE_TYPE (exp
)), OPTAB_LIB_WIDEN
);
9808 /* Make sure the value is stored into OP0. */
9810 emit_move_insn (op0
, op1
);
9815 /* Generate code to calculate EXP using a store-flag instruction
9816 and return an rtx for the result. EXP is either a comparison
9817 or a TRUTH_NOT_EXPR whose operand is a comparison.
9819 If TARGET is nonzero, store the result there if convenient.
   If ONLY_CHEAP is nonzero, only do this if it is likely to be very
   cheap.
9824 Return zero if there is no suitable set-flag instruction
9825 available on this machine.
9827 Once expand_expr has been called on the arguments of the comparison,
9828 we are committed to doing the store flag, since it is not safe to
9829 re-evaluate the expression. We emit the store-flag insn by calling
9830 emit_store_flag, but only expand the arguments if we have a reason
9831 to believe that emit_store_flag will be successful. If we think that
9832 it will, but it isn't, we have to simulate the store-flag with a
9833 set/jump/set sequence. */
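
/* For example, a source expression such as

       flag = (a < b);

   can often be compiled to a single store-flag (scc/setcc style)
   instruction.  When that is not possible after the operands have
   already been expanded, the fallback emitted at the end of this
   function is, schematically:

       target = 1;  if (a < b) goto L;  target = 0;  L:

   with the two constants swapped when the result must be inverted.  */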

static rtx
do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
           && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
               == FUNCTION_TYPE))
          || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
              && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
                  == FUNCTION_TYPE))))
    return 0;
#endif

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */
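
  /* For example, for signed operands "x < 1" is the same test as
     "x <= 0" and "x > -1" is the same test as "x >= 0", so those
     comparisons are rewritten below to compare against zero.  */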

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;

    case NE_EXPR:
      code = NE;
      break;

    case LT_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
        code = unsignedp ? LTU : LT;
      break;

    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = LT;
      else
        code = unsignedp ? LEU : LE;
      break;

    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = GE;
      else
        code = unsignedp ? GTU : GT;
      break;

    case GE_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
        code = unsignedp ? GEU : GE;
      break;

    case UNORDERED_EXPR:
      code = UNORDERED;
      break;

    case ORDERED_EXPR:
      code = ORDERED;
      break;

    case UNLT_EXPR:
      code = UNLT;
      break;

    case UNLE_EXPR:
      code = UNLE;
      break;

    case UNGT_EXPR:
      code = UNGT;
      break;

    case UNGE_EXPR:
      code = UNGE;
      break;

    case UNEQ_EXPR:
      code = UNEQ;
      break;

    default:
      abort ();
    }
9955 /* Put a constant second. */
9956 if (TREE_CODE (arg0
) == REAL_CST
|| TREE_CODE (arg0
) == INTEGER_CST
)
9958 tem
= arg0
; arg0
= arg1
; arg1
= tem
;
9959 code
= swap_condition (code
);

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.

     The code to make this transformation was moved into fold_single_bit_test,
     so we just call into the folder and expand its result.  */
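
  /* For example, "(x & 4) != 0" becomes "(x >> 2) & 1", and the EQ form
     additionally XORs the result with 1.  */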

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree type = (*lang_hooks.types.type_for_mode) (mode, unsignedp);
      return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
                                                arg0, arg1, type),
                          target, VOIDmode, EXPAND_NORMAL);
    }

  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (code, operand_mode, ccp_store_flag))
    return 0;

  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
    {
      /* We can only do this if it is one of the special cases that
         can be handled without an scc insn.  */
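      /* For example, "x < 0" for signed X only tests the sign bit, so
         emit_store_flag can usually compute it with a shift, and
         "x >= 0" with a shift plus an exclusive-or, without any scc
         instruction.  */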
      if ((code == LT && integer_zerop (arg1))
          || (! only_cheap && code == GE && integer_zerop (arg1)))
        ;
      else if (BRANCH_COST >= 0
               && ! only_cheap && (code == NE || code == EQ)
               && TREE_CODE (type) != REAL_TYPE
               && ((abs_optab->handlers[(int) operand_mode].insn_code
                    != CODE_FOR_nothing)
                   || (ffs_optab->handlers[(int) operand_mode].insn_code
                       != CODE_FOR_nothing)))
        ;
      else
        return 0;
    }

  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1, 1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if the emit_store_flag does anything it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
                            queued_subexp_p (op0) ? copy_rtx (op0) : op0,
                            queued_subexp_p (op1) ? copy_rtx (op1) : op1,
                            operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
        result = expand_binop (mode, xor_optab, result, const1_rtx,
                               result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
                             operand_mode, NULL_RTX);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
             || (result != const0_rtx && invert))
            ? const0_rtx : const1_rtx);

  /* The code of RESULT may not match CODE if compare_from_rtx
     decided to swap its operands and reverse the original code.

     We know that compare_from_rtx returns either a CONST_INT or
     a new comparison code, so it is safe to just extract the
     code from RESULT.  */
  code = GET_CODE (result);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}

/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif

/* If the machine does not have a case insn that compares the bounds,
   this means extra overhead for dispatch tables, which raises the
   threshold for using them.  */
#ifndef CASE_VALUES_THRESHOLD
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#endif /* CASE_VALUES_THRESHOLD */
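
/* With the default definition above, a switch statement therefore
   needs at least four distinct case values (five when there is no
   casesi pattern) before a dispatch table is considered preferable to
   a chain of compares and branches.  */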

unsigned int
case_values_threshold (void)
{
  return CASE_VALUES_THRESHOLD;
}

/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */
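
/* The casesi pattern takes five operands, in the order used below: the
   index expression, the lower bound of the case range, the range (the
   upper bound minus the lower bound), the label of the dispatch table,
   and the default label.  For a switch whose case values run from 10
   to 20, for instance, the second operand is 10 and the third is 10.  */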

int
try_casesi (tree index_type, tree index_expr, tree minval, tree range,
            rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
{
  enum machine_mode index_mode = SImode;
  int index_bits = GET_MODE_BITSIZE (index_mode);
  rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    return 0;

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build (MINUS_EXPR, index_type,
                          index_expr, minval);
      minval = integer_zero_node;
      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
                               omode, 1, default_label);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
        {
          index_expr = convert ((*lang_hooks.types.type_for_size)
                                (index_bits, 0), index_expr);
          index_type = TREE_TYPE (index_expr);
        }

      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
    }

  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);

  op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
                       op1, TREE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);

  op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
                       op2, TREE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);

  emit_jump_insn (gen_casesi (index, op1, op2,
                              table_label, default_label));
  return 1;
}

/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif

/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

static void
do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
              rtx default_label)
{
  rtx temp, vector;

  if (INTVAL (range) > cfun->max_jumptable_ents)
    cfun->max_jumptable_ents = INTVAL (range);

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */
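
  /* For example, with case values 5 through 12 the lower bound 5 has
     already been subtracted, RANGE is 7, and the single unsigned test
     "(unsigned) (index - 5) > 7" rejects both index < 5 (the
     subtraction wraps around to a large unsigned value) and
     index > 12.  */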

  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
                           default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip thru, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
                        gen_rtx_MULT (Pmode, index,
                                      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
                        gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  MEM_NOTRAP_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}

int
try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
               rtx table_label, rtx default_label)
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold (build (MINUS_EXPR, index_type,
                            convert (index_type, index_expr),
                            convert (index_type, minval)));
  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
                convert_modes (TYPE_MODE (index_type),
                               TYPE_MODE (TREE_TYPE (range)),
                               expand_expr (range, NULL_RTX,
                                            VOIDmode, 0),
                               TREE_UNSIGNED (TREE_TYPE (range))),
                table_label, default_label);
  return 1;
}

/* Nonzero if the mode is a valid vector mode for this architecture.
   This returns nonzero even if there is no hardware support for the
   vector mode, but we can emulate with narrower modes.  */
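
/* For example, a target with DImode moves but no native V2DImode
   support can still treat V2DImode as valid here; such a vector can
   then be handled piecewise as a pair of DImode values.  */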

int
vector_mode_valid_p (enum machine_mode mode)
{
  enum mode_class class = GET_MODE_CLASS (mode);
  enum machine_mode innermode;

  /* Doh!  What's going on?  */
  if (class != MODE_VECTOR_INT
      && class != MODE_VECTOR_FLOAT)
    return 0;

  /* Hardware support.  Woo hoo!  */
  if (VECTOR_MODE_SUPPORTED_P (mode))
    return 1;

  innermode = GET_MODE_INNER (mode);

  /* We should probably return 1 if requesting V4DI and we have no DI,
     but we have V2DI, but this is probably very unlikely.  */

  /* If we have support for the inner mode, we can safely emulate it.
     We may not have V2DI, but we can emulate with a pair of DIs.  */
  return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
}

/* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */
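
/* For example, a V4SImode VECTOR_CST holding {1, 2, 3, 4} becomes
   (const_vector:V4SI [1 2 3 4]); any elements beyond those listed in
   the tree are filled in as zero.  */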

static rtx
const_vector_from_tree (tree exp)
{
  rtvec v;
  int units, i;
  tree link, elt;
  enum machine_mode inner, mode;

  mode = TYPE_MODE (TREE_TYPE (exp));

  if (is_zeros_p (exp))
    return CONST0_RTX (mode);

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  v = rtvec_alloc (units);

  link = TREE_VECTOR_CST_ELTS (exp);
  for (i = 0; link; link = TREE_CHAIN (link), ++i)
    {
      elt = TREE_VALUE (link);

      if (TREE_CODE (elt) == REAL_CST)
        RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
                                                         inner);
      else
        RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
                                               TREE_INT_CST_HIGH (elt),
                                               inner);
    }

  /* Initialize remaining elements to 0.  */
  for (; i < units; ++i)
    RTVEC_ELT (v, i) = CONST0_RTX (inner);

  return gen_rtx_raw_CONST_VECTOR (mode, v);
}

#include "gt-expr.h"